[ 499.214838] env[68674]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68674) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 499.215247] env[68674]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68674) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 499.215289] env[68674]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68674) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 499.215616] env[68674]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 499.312322] env[68674]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68674) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 499.321987] env[68674]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68674) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 499.365261] env[68674]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 500.038065] env[68674]: INFO nova.virt.driver [None req-c56e7249-93c4-40f2-9be0-b25f4770be44 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 500.109020] env[68674]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 500.109192] env[68674]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 500.109289] env[68674]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68674) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 503.022480] env[68674]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-389b3a38-f687-4eff-9a17-14de155ad3a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.039285] env[68674]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68674) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 503.039436] env[68674]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-975d1f09-295a-4020-9f65-83800d9a323c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.064171] env[68674]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d5118.
[ 503.064320] env[68674]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.955s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 503.064880] env[68674]: INFO nova.virt.vmwareapi.driver [None req-c56e7249-93c4-40f2-9be0-b25f4770be44 None None] VMware vCenter version: 7.0.3
[ 503.068414] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a357c65f-0a5e-48fd-a196-3903cddf86ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.086410] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fa6f66-bf1e-4017-a6e3-ed01b716bc26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.092330] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13e48fc-389d-44a2-a6f7-f68e5746ea56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.099025] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a614a3fd-8a52-42a9-9272-33782da942aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.112243] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893b2fa1-c9f6-45db-a651-fef79d30bec6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.118117] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e47f3c8-a93d-4cf8-916b-a8157bbe93bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.148502] env[68674]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-cbaf10c6-c08d-4c52-bd27-8dbab6451c67 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 503.153747] env[68674]: DEBUG nova.virt.vmwareapi.driver [None req-c56e7249-93c4-40f2-9be0-b25f4770be44 None None] Extension org.openstack.compute already exists. {{(pid=68674) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 503.156454] env[68674]: INFO nova.compute.provider_config [None req-c56e7249-93c4-40f2-9be0-b25f4770be44 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 503.659730] env[68674]: DEBUG nova.context [None req-c56e7249-93c4-40f2-9be0-b25f4770be44 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),dfd5071a-d2e5-4061-abde-793fac989b97(cell1) {{(pid=68674) load_cells /opt/stack/nova/nova/context.py:464}}
[ 503.661935] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 503.662175] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 503.662868] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 503.663299] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Acquiring lock "dfd5071a-d2e5-4061-abde-793fac989b97" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 503.663488] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Lock "dfd5071a-d2e5-4061-abde-793fac989b97" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 503.664492] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Lock "dfd5071a-d2e5-4061-abde-793fac989b97" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 503.684451] env[68674]: INFO dbcounter [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Registered counter for database nova_cell0
[ 503.692778] env[68674]: INFO dbcounter [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Registered counter for database nova_cell1
[ 504.135880] env[68674]: DEBUG oslo_db.sqlalchemy.engines [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68674) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 504.136312] env[68674]: DEBUG oslo_db.sqlalchemy.engines [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68674) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 504.141372] env[68674]: ERROR nova.db.main.api [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 504.141372] env[68674]: result = function(*args, **kwargs)
[ 504.141372] env[68674]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 504.141372] env[68674]: return func(*args, **kwargs)
[ 504.141372] env[68674]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 504.141372] env[68674]: result = fn(*args, **kwargs)
[ 504.141372] env[68674]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 504.141372] env[68674]: return f(*args, **kwargs)
[ 504.141372] env[68674]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 504.141372] env[68674]: return db.service_get_minimum_version(context, binaries)
[ 504.141372] env[68674]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 504.141372] env[68674]: _check_db_access()
[ 504.141372] env[68674]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 504.141372] env[68674]: stacktrace = ''.join(traceback.format_stack())
[ 504.141372] env[68674]:
[ 504.142217] env[68674]: ERROR nova.db.main.api [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 504.142217] env[68674]: result = function(*args, **kwargs)
[ 504.142217] env[68674]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 504.142217] env[68674]: return func(*args, **kwargs)
[ 504.142217] env[68674]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 504.142217] env[68674]: result = fn(*args, **kwargs)
[ 504.142217] env[68674]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 504.142217] env[68674]: return f(*args, **kwargs)
[ 504.142217] env[68674]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 504.142217] env[68674]: return db.service_get_minimum_version(context, binaries)
[ 504.142217] env[68674]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 504.142217] env[68674]: _check_db_access()
[ 504.142217] env[68674]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 504.142217] env[68674]: stacktrace = ''.join(traceback.format_stack())
[ 504.142217] env[68674]:
[ 504.142628] env[68674]: WARNING nova.objects.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 504.142758] env[68674]: WARNING nova.objects.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Failed to get minimum service version for cell dfd5071a-d2e5-4061-abde-793fac989b97
[ 504.143227] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Acquiring lock "singleton_lock" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 504.143384] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Acquired lock "singleton_lock" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 504.143623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Releasing lock "singleton_lock" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 504.143952] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Full set of CONF: {{(pid=68674) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}}
[ 504.144111] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ******************************************************************************** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 504.144240] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] Configuration options gathered from: {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 504.144377] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 504.144570] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 504.144698] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ================================================================================ {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 504.144910] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] allow_resize_to_same_host = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 504.145094] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] arq_binding_timeout = 300 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 504.145229] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] backdoor_port = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 504.145357] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] backdoor_socket = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 504.145521] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] block_device_allocate_retries = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 504.145678] env[68674]: DEBUG oslo_service.backend.eventlet.service [None
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] block_device_allocate_retries_interval = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.145847] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cert = self.pem {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.146021] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.146196] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute_monitors = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.146367] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] config_dir = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.146792] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] config_drive_format = iso9660 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.146942] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.147133] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] config_source = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.147312] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] console_host = devstack {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.147484] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] control_exchange = nova {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.147647] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cpu_allocation_ratio = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.147810] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] daemon = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.147985] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] debug = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.148160] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] default_access_ip_network_name = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.148329] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] default_availability_zone = nova {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.148486] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] default_ephemeral_format = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.148684] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] default_green_pool_size = 1000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.148923] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.149101] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] default_schedule_zone = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.149267] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] disk_allocation_ratio = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.149429] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] enable_new_services = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.149640] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] enabled_apis = ['osapi_compute'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.149773] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] enabled_ssl_apis = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.149938] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] flat_injected = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.150108] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] force_config_drive = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.150269] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] force_raw_images = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.150438] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] graceful_shutdown_timeout = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.150601] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] heal_instance_info_cache_interval = -1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.150828] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] host = cpu-1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.151056] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.151230] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.151396] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.151609] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.151772] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_build_timeout = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.151931] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_delete_interval = 300 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.152107] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_format = [instance: %(uuid)s] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.152274] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_name_template = instance-%08x {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.152432] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_usage_audit = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.152598] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_usage_audit_period = month {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.152792] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.152972] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.153151] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] internal_service_availability_zone = internal {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.153307] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] key = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.153464] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] live_migration_retry_count = 30 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.153635] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_color = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.153798] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_config_append = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.153961] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.154136] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_dir = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.154293] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_file = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.154420] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_options = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.154578] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_rotate_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.154742] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_rotate_interval_type = days {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.154906] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] log_rotation_type = none {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155042] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155169] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155333] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155495] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155620] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155778] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] long_rpc_timeout = 1800 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.155934] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] max_concurrent_builds = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.156098] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] max_concurrent_live_migrations = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.156255] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] max_concurrent_snapshots = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.156408] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] max_local_block_devices = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.156578] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] max_logfile_count = 30 {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.156776] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] max_logfile_size_mb = 200 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.156940] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] maximum_instance_delete_attempts = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.157125] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metadata_listen = 0.0.0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.157291] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metadata_listen_port = 8775 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.157458] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metadata_workers = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.157617] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] migrate_max_retries = -1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.157781] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] mkisofs_cmd = genisoimage {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.157984] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.158126] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] my_ip = 10.180.1.21 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.158329] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.158488] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] network_allocate_retries = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.158688] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.158865] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.159039] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] osapi_compute_listen_port = 8774 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.159236] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] osapi_compute_unique_server_name_scope = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.159424] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] osapi_compute_workers = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.159586] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] password_length = 12 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.159747] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] periodic_enable = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.159905] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] periodic_fuzzy_delay = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.160158] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] pointer_model = usbtablet {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.160441] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] preallocate_images = none {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.160661] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] publish_errors = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.160800] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] pybasedir = /opt/stack/nova {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.160965] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ram_allocation_ratio = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.161149] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] rate_limit_burst = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.161305] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] rate_limit_except_level = CRITICAL {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.161463] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] rate_limit_interval = 0 {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.161619] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reboot_timeout = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.161774] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reclaim_instance_interval = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.161942] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] record = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.162144] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reimage_timeout_per_gb = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.162338] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] report_interval = 120 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.162507] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] rescue_timeout = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.162664] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reserved_host_cpus = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.162822] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reserved_host_disk_mb = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.162976] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reserved_host_memory_mb = 512 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.163146] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] reserved_huge_pages = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.163307] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] resize_confirm_window = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.163466] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] resize_fs_using_block_device = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.163625] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] resume_guests_state_on_host_boot = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.163795] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.163957] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] rpc_response_timeout = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.164128] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] run_external_periodic_tasks = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.164298] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] running_deleted_instance_action = reap {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.164456] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.164614] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] running_deleted_instance_timeout = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.164803] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler_instance_sync_interval = 120 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.164991] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_down_time = 720 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.165177] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] servicegroup_driver = db {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.165413] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] shell_completion = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.165696] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] shelved_offload_time = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.165905] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] shelved_poll_interval = 3600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.166100] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] shutdown_timeout = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.166271] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] source_is_ipv6 = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.166433] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ssl_only = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.166699] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.166870] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] sync_power_state_interval = 600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.167042] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] sync_power_state_pool_size = 1000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.167216] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] syslog_log_facility = LOG_USER {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.167372] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] tempdir = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.167533] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] timeout_nbd = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.167699] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] transport_url = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.167861] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] update_resources_interval = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.168033] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] use_cow_images = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.168197] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] use_journal = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.168355] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] use_json = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.168511] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] use_rootwrap_daemon = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.168686] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] 
use_stderr = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.168854] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] use_syslog = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.169024] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vcpu_pin_set = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.169190] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plugging_is_fatal = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.169357] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plugging_timeout = 300 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.169521] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] virt_mkfs = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.169681] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] volume_usage_poll_interval = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.169842] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] watch_log_file = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.170011] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] web = /usr/share/spice-html5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 504.170205] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.170371] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.170533] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.170703] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_concurrency.disable_process_locking = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.171023] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.171249] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.171378] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.171549] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.171718] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.171886] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.172107] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.auth_strategy = keystone {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.172279] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.compute_link_prefix = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.172458] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.172634] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.dhcp_domain = novalocal {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.172808] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.enable_instance_password = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.172976] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.glance_link_prefix = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.173156] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.173333] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.173502] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.instance_list_per_project_cells = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.173678] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.list_records_by_skipping_down_cells = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.173855] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.local_metadata_per_cell = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.174037] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.max_limit = 1000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.174208] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.metadata_cache_expiration = 15 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.174384] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.neutron_default_tenant_id = default {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.174555] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.response_validation = warn {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.174730] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.use_neutron_default_nets = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.174907] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.175081] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.175252] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.175426] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.175594] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_dynamic_targets = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.175753] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_jsonfile_path = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.175933] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.176139] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.backend = dogpile.cache.memcached {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.176309] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.backend_argument = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.176469] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.backend_expiration_time = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.176678] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.config_prefix = cache.oslo {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.176861] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.dead_timeout = 60.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.177042] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.debug_cache_backend = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.177214] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.enable_retry_client = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.177379] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.enable_socket_keepalive = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.177552] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.enabled = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.177718] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.enforce_fips_mode = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.177886] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.expiration_time = 600 
{{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.178058] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.hashclient_retry_attempts = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.178228] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.178392] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_dead_retry = 300 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.178548] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_password = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.178738] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.178907] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.179079] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_pool_maxsize = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.179241] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.179405] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_sasl_enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.179584] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.179754] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.179919] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.memcache_username = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.180095] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.proxies = [] {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.180260] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_db = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.180419] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_password = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.180590] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.180766] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.180937] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_server = localhost:6379 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.181119] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_socket_timeout = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.181281] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.redis_username = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.181444] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.retry_attempts = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.181608] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.retry_delay = 0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.181770] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.socket_keepalive_count = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.181930] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.socket_keepalive_idle = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.182098] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.socket_keepalive_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.182257] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.tls_allowed_ciphers = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.182411] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.tls_cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.182566] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.tls_certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.182727] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.tls_enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.182884] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cache.tls_keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.183063] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.183238] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.auth_type = password {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.183398] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.183570] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.183728] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.183894] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.184065] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.cross_az_attach = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.184231] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.debug = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.184389] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.endpoint_template = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.184552] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.http_retries = 3 {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.184714] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.184872] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.185050] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.os_region_name = RegionOne {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.185215] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.185374] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cinder.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.185545] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.185703] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.cpu_dedicated_set = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.185862] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.cpu_shared_set = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.186033] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.image_type_exclude_list = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.186198] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.186360] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.186529] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.186709] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.186881] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.187057] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.resource_provider_association_refresh = 300 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.187221] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.187384] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.shutdown_retry_interval = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.187566] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.187746] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] conductor.workers = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.187923] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] console.allowed_origins = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.188093] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] console.ssl_ciphers = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.188265] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] console.ssl_minimum_version = default {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.188433] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] consoleauth.enforce_session_timeout = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.188622] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] consoleauth.token_ttl = 600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.188822] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.188989] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.certfile = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.189171] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.189331] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.connect_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.189491] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.189647] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.189811] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.189966] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.190137] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.190293] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.190448] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.190605] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.190760] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.190930] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.service_type = accelerator {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.191109] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.191268] env[68674]: DEBUG oslo_service.backend.eventlet.service 
[None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.191462] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.191575] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.191753] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.191920] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] cyborg.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.192095] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.asyncio_connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.192259] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.asyncio_slave_connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.192426] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.backend = sqlalchemy {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.192595] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.192759] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.connection_debug = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.192930] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.connection_parameters = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.193103] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.connection_recycle_time = 3600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.193267] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.connection_trace = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.193429] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.db_inc_retry_interval = 
True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.193589] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.db_max_retries = 20 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.193749] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.db_max_retry_interval = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.193910] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.db_retry_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.194080] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.max_overflow = 50 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.194243] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.max_pool_size = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.194400] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.max_retries = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.194569] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.194726] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.mysql_wsrep_sync_wait = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.194883] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.pool_timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.195051] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.retry_interval = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.195209] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.slave_connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.195371] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.sqlite_synchronous = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.195542] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] database.use_db_reconnect = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
504.195691] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.asyncio_connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.195847] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.asyncio_slave_connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.196018] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.backend = sqlalchemy {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.196190] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.196352] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.connection_debug = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.196528] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.connection_parameters = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.196706] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.connection_recycle_time = 3600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.196872] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.connection_trace = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.197043] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.db_inc_retry_interval = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.197206] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.db_max_retries = 20 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.197366] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.db_max_retry_interval = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.197526] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.db_retry_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.197685] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.max_overflow = 50 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.197846] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.max_pool_size = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.198014] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.max_retries = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.198187] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.198345] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.198502] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.pool_timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.198686] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.retry_interval = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.198850] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.slave_connection = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.199023] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] api_database.sqlite_synchronous = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.199200] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] devices.enabled_mdev_types = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.199379] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.199550] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.199717] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ephemeral_storage_encryption.enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.199902] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.200066] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.api_servers = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.200237] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.200401] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.200566] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.200736] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.connect_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.200921] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.201117] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.debug = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.201292] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.default_trusted_certificate_ids = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.201458] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.enable_certificate_validation = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.201624] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.enable_rbd_download = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.201785] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.201968] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.202173] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.202342] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.max_version = None {{(pid=68674) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.202502] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.202666] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.num_retries = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.202840] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.rbd_ceph_conf = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.203017] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.rbd_connect_timeout = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.203185] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.rbd_pool = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.203351] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.rbd_user = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.203517] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.203676] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.203834] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204006] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.service_type = image {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204176] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204334] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204494] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204652] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204835] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.204998] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.verify_glance_signatures = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.205169] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] glance.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.205336] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] guestfs.debug = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.205505] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.205668] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.auth_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.205828] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.205983] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.206160] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.206321] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.connect_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.206480] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.206639] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.206803] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.insecure = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.206960] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.207132] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.207292] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.207452] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.207610] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.207766] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.207936] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.service_type = shared-file-system {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.208109] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.share_apply_policy_timeout = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.208273] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.208439] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.208586] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.208770] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.208956] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.209129] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] manila.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.209353] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] mks.enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.209789] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.210033] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] image_cache.manager_interval = 2400 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.210244] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] image_cache.precache_concurrency = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.210441] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] image_cache.remove_unused_base_images = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.210618] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.210789] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.210970] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] image_cache.subdirectory_name = _base {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.211161] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.api_max_retries = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.211325] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.api_retry_interval = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.211483] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.211650] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.auth_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.211801] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.211960] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.212137] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.212300] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.conductor_group = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.212457] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.connect_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.212615] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.212773] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.212933] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.213099] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.213256] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.213409] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.213568] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.peer_list = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.213723] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.213879] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.retriable_status_codes = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.214047] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.serial_console_state_timeout = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.214206] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.214372] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.service_type = baremetal {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.214529] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.shard = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.214689] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.214845] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.215013] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.215169] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.215349] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.215508] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ironic.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.215691] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.215860] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] key_manager.fixed_key = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.216051] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.216222] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.barbican_api_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.216415] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.barbican_endpoint = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.216591] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.barbican_endpoint_type = public {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.216750] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.barbican_region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.216910] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.217076] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.217241] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.217402] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.217557] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.217716] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.number_of_retries = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.217876] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.retry_delay = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.218046] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.send_service_user_token = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.218210] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.218365] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.218525] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.verify_ssl = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.218703] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican.verify_ssl_path = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.218881] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.219056] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.auth_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.219218] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.219376] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.219538] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.219733] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.219905] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.220081] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.220242] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] barbican_service_user.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.220406] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.approle_role_id = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.220563] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.approle_secret_id = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.220732] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.kv_mountpoint = secret {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.220893] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.kv_path = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.221065] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.kv_version = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.221225] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.namespace = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.221381] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.root_token_id = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.221535] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.ssl_ca_crt_file = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.221749] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.timeout = 60.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.221864] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.use_ssl = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.222051] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.222224] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.222384] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.222550] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.222710] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.connect_retries = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.222869] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223037] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223204] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223360] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223514] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223669] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223826] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.223983] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.224152] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.224322] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.service_type = identity {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.224482] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.224638] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.224796] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.224952] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.225143] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.225302] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] keystone.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.225490] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.ceph_mount_options = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.225889] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.226091] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.connection_uri = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.226262] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_mode = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.226443] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.226617] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_models = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.226794] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_power_governor_high = performance {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.226966] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.227145] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_power_management = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.227321] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.227492] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.device_detach_attempts = 8 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.227661] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.device_detach_timeout = 20 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.227832] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.disk_cachemodes = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.227994] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.disk_prefix = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.228172] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.enabled_perf_events = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.228335] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.file_backed_memory = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.228499] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.gid_maps = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.228676] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.hw_disk_discard = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.228848] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.hw_machine_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.229035] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_rbd_ceph_conf = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.229205] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.229368] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.229536] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_rbd_glance_store_name = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.229726] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.229904] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_type = default {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.230076] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.images_volume_group = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.230243] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.inject_key = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.230406] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.inject_partition = -2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.230571] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.inject_password = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.230735] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.iscsi_iface = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.230899] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.iser_use_multipath = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.231073] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.231238] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.231400] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_downtime = 500 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.231561] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.231741] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.231914] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_inbound_addr = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.232092] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.232260] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.232421] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_scheme = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.232594] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_timeout_action = abort {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.232760] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_tunnelled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.232924] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_uri = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.233097] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.live_migration_with_native_tls = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.233259] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.max_queues = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.233422] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.233647] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.233814] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.nfs_mount_options = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.234108] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.234354] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68674) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.234451] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.234613] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.234774] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.234937] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.num_pcie_ports = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.235116] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.235284] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.pmem_namespaces = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.235445] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.quobyte_client_cfg = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.235733] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.235917] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.236096] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.236262] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.236424] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rbd_secret_uuid = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.236583] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rbd_user = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.236744] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.236918] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.237091] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rescue_image_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.237250] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rescue_kernel_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.237407] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rescue_ramdisk_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.237575] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.237736] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.rx_queue_size = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.237904] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.smbfs_mount_options = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.238209] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.238389] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.snapshot_compression = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.238551] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.snapshot_image_format = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.238812] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.238983] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.sparse_logical_volumes = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.239162] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.swtpm_enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.239334] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.swtpm_group = tss {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.239504] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.swtpm_user = tss {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.239698] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.sysinfo_serial = unique {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.239894] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.tb_cache_size = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.240080] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.tx_queue_size = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.240251] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.uid_maps = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.240417] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.use_virtio_for_bridges = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.240590] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.virt_type = kvm {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.240761] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.volume_clear = zero {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.240951] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.volume_clear_size = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.241136] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.volume_enforce_multipath = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.241307] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.volume_use_multipath = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.241467] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_cache_path = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.241637] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.241807] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.241972] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.242154] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.242435] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.242617] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.vzstorage_mount_user = stack {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.242783] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.242956] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.243143] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.auth_type = password {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.243307] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.243467] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.243629] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.243814] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.connect_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.243983] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.244172] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.default_floating_pool = public {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.244335] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.244496] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.extension_sync_interval = 600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.244657] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.http_retries = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.244822] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.244981] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.245154] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.245325] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.245483] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.245653] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.ovs_bridge = br-int {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.245819] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.physnets = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.245991] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.region_name = RegionOne 
{{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.246163] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.246334] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.service_metadata_proxy = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.246494] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.246673] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.service_type = network {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.246833] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.246991] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.247163] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.247320] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.247500] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.247664] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] neutron.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.247836] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] notifications.bdms_in_notifications = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.248020] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] notifications.default_level = INFO {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.248190] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] notifications.include_share_mapping = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.248365] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] notifications.notification_format = unversioned {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.248527] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] notifications.notify_on_state_change = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.248730] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.248908] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] pci.alias = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.249088] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] pci.device_spec = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.249255] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] pci.report_in_placement = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.249427] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.249629] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.auth_type = password {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.249799] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.249965] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.250138] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.250367] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.250463] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.connect_retries = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.250625] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.250784] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.default_domain_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.250949] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.default_domain_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.251137] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.domain_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.251298] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.domain_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.251457] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.endpoint_override = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.251618] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.251782] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252010] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252131] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252274] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.password = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252432] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.project_domain_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252598] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.project_domain_name = Default {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252763] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.project_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.252936] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.project_name = service {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.253138] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.region_name = RegionOne {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.253306] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.253475] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.253642] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.service_type = placement {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.253810] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.253969] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.status_code_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.254141] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.254300] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.system_scope = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.254460] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.254618] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.trust_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.254784] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.user_domain_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.254955] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] 
placement.user_domain_name = Default {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.255124] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.user_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.255296] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.username = nova {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.255475] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.255637] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] placement.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.255842] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.cores = 20 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.256028] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.count_usage_from_placement = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.256209] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.256379] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.injected_file_content_bytes = 10240 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.256554] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.injected_file_path_length = 255 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.256716] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.injected_files = 5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.256881] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.instances = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.257056] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.key_pairs = 100 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.257226] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.metadata_items = 128 {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.257392] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.ram = 51200 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.257556] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.recheck_quota = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.257723] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.server_group_members = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.257889] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.server_groups = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.258110] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.258317] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] quota.unified_limits_resource_strategy = require {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.258547] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.258747] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.258922] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.image_metadata_prefilter = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.259104] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.259272] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.max_attempts = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.259436] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.max_placement_results = 1000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.259602] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.259785] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.259954] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.260144] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] scheduler.workers = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.260326] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.260499] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.260679] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.260850] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.261034] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.261207] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.261370] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.261556] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.261722] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.261886] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.262054] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.262218] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.262379] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.262552] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.262715] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.isolated_hosts = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.262880] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.isolated_images = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.263058] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.263223] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.263386] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.263548] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.pci_in_placement = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.263709] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.263873] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.264044] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.264210] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.264372] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.264535] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.264695] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.track_instance_changes = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.264871] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.265050] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metrics.required = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.265218] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metrics.weight_multiplier = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.265381] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.265546] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] metrics.weight_setting = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.265855] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.266043] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] serial_console.enabled = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.266224] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] serial_console.port_range = 10000:20000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.266398] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.266565] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.266743] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] serial_console.serialproxy_port = 6083 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.266915] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.267102] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.auth_type = password {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.267267] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.267425] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.267589] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.267757] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.267937] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.268124] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.send_service_user_token = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.268289] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.split_loggers = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.268447] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] service_user.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.268628] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.agent_enabled = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.268806] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.269123] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.269326] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.269499] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.html5proxy_port = 6082 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.269665] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.image_compression = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.269825] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.jpeg_compression = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.269984] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.playback_compression = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.270160] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.require_secure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.270331] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.server_listen = 127.0.0.1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.270501] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.270780] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.270954] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.streaming_mode = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.271131] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] spice.zlib_compression = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.271303] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] upgrade_levels.baseapi = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.271476] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] upgrade_levels.compute = auto {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.271639] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] upgrade_levels.conductor = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.271800] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] upgrade_levels.scheduler = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.271968] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.272145] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.272306] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.272464] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.272626] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.272786] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.272943] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.273114] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.273274] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vendordata_dynamic_auth.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.273444] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.api_retry_count = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.273601] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.ca_file = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.273785] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.273968] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.cluster_name = testcl1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.274148] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.connection_pool_size = 10 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.274310] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.console_delay_seconds = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.274481] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.datastore_regex = ^datastore.* {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.274691] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.274880] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.host_password = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.275061] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.host_port = 443 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.275236] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.host_username = administrator@vsphere.local {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.275408] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.insecure = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.275572] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.integration_bridge = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.275737] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.maximum_objects = 100 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.275956] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.pbm_default_policy = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.276154] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.pbm_enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.276320] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.pbm_wsdl_location = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.276492] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.276654] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.serial_port_proxy_uri = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.276815] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.serial_port_service_uri = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.276982] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.task_poll_interval = 0.5 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.277171] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.use_linked_clone = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.277342] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.vnc_keymap = en-us {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.277510] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.vnc_port = 5900 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.277674] env[68674]: DEBUG 
oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vmware.vnc_port_total = 10000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.277868] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.auth_schemes = ['none'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.278051] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.278346] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.278532] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.278736] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.novncproxy_port = 6080 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.278938] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.server_listen = 127.0.0.1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.279137] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.279304] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.vencrypt_ca_certs = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.279462] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.vencrypt_client_cert = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.279630] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vnc.vencrypt_client_key = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.279844] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.280030] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.disable_deep_image_inspection = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.280200] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.280364] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.280550] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.280725] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.disable_rootwrap = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.280909] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.enable_numa_live_migration = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.281118] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.281288] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.281452] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.281612] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.libvirt_disable_apic = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.281773] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.281936] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.282262] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.282443] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.282614] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.282783] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.282947] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.283125] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.283291] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.283458] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.283644] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.283815] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.client_socket_timeout = 900 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.283983] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.default_pool_size = 1000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.284160] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.keep_alive = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.284328] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.max_header_line = 16384 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.284490] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.284650] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.ssl_ca_file = None 
{{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.284812] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.ssl_cert_file = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.284971] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.ssl_key_file = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.285147] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.tcp_keepidle = 600 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.285323] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.285489] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] zvm.ca_file = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.285650] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] zvm.cloud_connector_url = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.285941] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.286129] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] zvm.reachable_timeout = 300 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.286304] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.286506] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.286664] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.connection_string = messaging:// {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.286829] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.enabled = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.287014] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] 
profiler.es_doc_type = notification {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.287177] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.es_scroll_size = 10000 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.287347] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.es_scroll_time = 2m {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.287511] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.filter_error_trace = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.287680] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.hmac_keys = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.287852] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.sentinel_service_name = mymaster {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.288026] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.socket_timeout = 0.1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.288195] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.trace_requests = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.288357] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler.trace_sqlalchemy = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.288537] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler_jaeger.process_tags = {} {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.288728] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler_jaeger.service_name_prefix = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.288903] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] profiler_otlp.service_name_prefix = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.289081] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] remote_debug.host = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.289245] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] remote_debug.port = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.289421] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.289585] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.289794] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.289969] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.290147] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.290310] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.290471] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.290632] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.290794] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.290985] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.291177] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.291351] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.291521] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.291687] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.291850] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.292028] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.292195] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.292361] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.292532] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.292696] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.292859] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.293034] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.293204] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.293366] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.293526] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68674) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.293687] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.293850] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.294016] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.294181] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.294345] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.294507] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.ssl = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.294678] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.294848] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.295018] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.295187] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.295355] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.295515] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.295699] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.295867] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_notifications.retry = -1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.296054] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.296234] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.296406] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.auth_section = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.296616] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.auth_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.296727] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.cafile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.296886] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.certfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.297061] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.collect_timing = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.297224] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.connect_retries = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.297383] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.connect_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.297542] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.endpoint_id = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.297782] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.298025] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.endpoint_override = 
None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.298207] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.endpoint_region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.298372] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.endpoint_service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.298535] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.endpoint_service_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.298729] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.insecure = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.298905] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.keyfile = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.299082] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.max_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.299245] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.min_version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.299405] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.region_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.299567] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.retriable_status_codes = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.299755] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.service_name = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.299924] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.service_type = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.300102] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.split_loggers = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.300269] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.status_code_retries = None {{(pid=68674) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.300431] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.status_code_retry_delay = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.300592] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.timeout = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.300753] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.valid_interfaces = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.300916] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_limit.version = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.301130] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_reports.file_event_handler = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.301305] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.301467] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] oslo_reports.log_dir = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.301642] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.301836] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.302012] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.302191] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.302358] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.302525] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.302694] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.302854] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_ovs_privileged.group = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.303019] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.303189] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.303353] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.303510] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] vif_plug_ovs_privileged.user = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.303681] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.303864] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.304055] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.304234] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.304405] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.304578] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.304744] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.304906] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.305099] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.305276] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.isolate_vif = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.305452] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.305618] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.305793] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.305963] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.306143] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] os_vif_ovs.per_port_bridge = False {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.306317] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] privsep_osbrick.capabilities = [21] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.306478] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] privsep_osbrick.group = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.306639] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] privsep_osbrick.helper_command = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.306807] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.306970] env[68674]: DEBUG oslo_service.backend.eventlet.service [None 
req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.307140] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] privsep_osbrick.user = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.307313] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.307472] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] nova_sys_admin.group = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.307630] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] nova_sys_admin.helper_command = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.307797] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.307958] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.308128] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] nova_sys_admin.user = None {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 504.308258] env[68674]: DEBUG oslo_service.backend.eventlet.service [None req-df5db7e7-1a6f-413a-b003-cfce10611c06 None None] ******************************************************************************** {{(pid=68674) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 504.308693] env[68674]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 504.811661] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Getting list of instances from cluster (obj){ [ 504.811661] env[68674]: value = "domain-c8" [ 504.811661] env[68674]: _type = "ClusterComputeResource" [ 504.811661] env[68674]: } {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 504.812735] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a95a3f-8fab-457b-a7e0-9a6b7e9eaf21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.822033] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Got total of 0 instances {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 504.822602] env[68674]: WARNING nova.virt.vmwareapi.driver [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 504.823096] env[68674]: INFO nova.virt.node [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Generated node identity ade3f042-7427-494b-9654-0b65e074850c [ 504.823329] env[68674]: INFO nova.virt.node [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Wrote node identity ade3f042-7427-494b-9654-0b65e074850c to /opt/stack/data/n-cpu-1/compute_id [ 505.326605] env[68674]: WARNING nova.compute.manager [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Compute nodes ['ade3f042-7427-494b-9654-0b65e074850c'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 506.334062] env[68674]: INFO nova.compute.manager [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 507.338854] env[68674]: WARNING nova.compute.manager [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 507.339218] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 507.339370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 507.339523] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 507.339676] env[68674]: DEBUG nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 507.340653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83150b35-d3d9-46b8-9814-3820f66e00b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.349246] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a0f931-d132-48c5-9ec3-5c536638cd6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.363572] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30509572-1cd6-4893-a29e-5e09da040652 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.369978] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38867a1f-152f-440e-916c-c9eb91801784 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.398316] env[68674]: DEBUG nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181038MB free_disk=121GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 507.398462] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 507.398694] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 507.901600] env[68674]: WARNING nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] No compute node record for cpu-1:ade3f042-7427-494b-9654-0b65e074850c: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host ade3f042-7427-494b-9654-0b65e074850c could not be found. [ 508.405197] env[68674]: INFO nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: ade3f042-7427-494b-9654-0b65e074850c [ 509.914683] env[68674]: DEBUG nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 509.915089] env[68674]: DEBUG nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 510.064890] env[68674]: INFO nova.scheduler.client.report [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] [req-fae1f924-99bf-4d64-8183-f97d716e6440] Created resource provider record via placement API for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 510.078378] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abafa8f-5648-4cdc-9e60-c4e9575b6528 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.085996] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca4f93d-450c-4a05-b6b7-5efff47a4fc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.115882] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c2f152-b326-483a-ab29-21f5a568b4b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.122696] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72f57e2-29d5-42c7-ba77-77689f824a98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.135927] env[68674]: DEBUG nova.compute.provider_tree [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 510.672392] env[68674]: DEBUG nova.scheduler.client.report [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 510.672622] env[68674]: DEBUG nova.compute.provider_tree [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 0 to 1 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 510.672762] env[68674]: DEBUG nova.compute.provider_tree [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 510.721150] env[68674]: DEBUG nova.compute.provider_tree [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Updating 
resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 1 to 2 during operation: update_traits {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 511.225967] env[68674]: DEBUG nova.compute.resource_tracker [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 511.226317] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.828s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 511.226376] env[68674]: DEBUG nova.service [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Creating RPC server for service compute {{(pid=68674) start /opt/stack/nova/nova/service.py:186}} [ 511.241132] env[68674]: DEBUG nova.service [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] Join ServiceGroup membership for this service compute {{(pid=68674) start /opt/stack/nova/nova/service.py:203}} [ 511.241335] env[68674]: DEBUG nova.servicegroup.drivers.db [None req-ed5ad021-2190-447f-8855-4a9f507c10db None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68674) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 543.244789] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_power_states {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 543.750908] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Getting list of instances from cluster (obj){ [ 543.750908] env[68674]: value = "domain-c8" [ 543.750908] env[68674]: _type = "ClusterComputeResource" [ 543.750908] env[68674]: } {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 543.754124] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8bba62-f5ae-436f-9136-23bea2f4de5b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.765295] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Got total of 0 instances {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 543.765716] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 543.766264] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Getting list of instances from cluster (obj){ [ 543.766264] env[68674]: value = "domain-c8" [ 543.766264] env[68674]: _type = "ClusterComputeResource" [ 543.766264] env[68674]: } {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 543.767589] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee063f1-cfa1-4138-9465-4cbbd0a4bbd2 
{{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.777648] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Got total of 0 instances {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 544.449472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 544.451951] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 544.956534] env[68674]: DEBUG nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 545.502114] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 545.502722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 545.505152] env[68674]: INFO nova.compute.claims [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.563224] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b95cc0-48b9-4473-b8f7-fdf1b4ecf8ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.573742] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44dff47-b6d8-4dfc-906f-ab0583853451 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.613987] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b198954-e423-4e82-8cda-b807ce403b24 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.624288] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d45262-cb2b-480d-9379-ed2d4e434d2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.638048] env[68674]: DEBUG nova.compute.provider_tree [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.144200] env[68674]: DEBUG nova.scheduler.client.report [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 547.236272] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 547.236976] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 547.653105] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.150s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 547.653752] env[68674]: DEBUG nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 547.739246] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 548.018316] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 548.018549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 548.158625] env[68674]: DEBUG nova.compute.utils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 548.160180] env[68674]: DEBUG nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Not allocating networking since 'none' was specified. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 548.270340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 548.270340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 548.270340] env[68674]: INFO nova.compute.claims [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.524204] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 548.661995] env[68674]: DEBUG nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 549.050451] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 549.358704] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1117098c-03fa-4688-ae5f-9d31eefd0809 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.368245] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadbc3fb-e95a-40b0-a460-39cc39584a68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.408026] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d22a1a7-eb5c-4acf-8bc4-173920c78b8d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.416499] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b7680d-e665-4aa4-9c2b-3a071861a7a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.431596] env[68674]: DEBUG nova.compute.provider_tree [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.676272] env[68674]: DEBUG nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 549.806036] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 549.806036] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.806036] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 549.806378] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.806378] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 549.806378] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 549.806378] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 549.806378] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 549.806529] env[68674]: DEBUG nova.virt.hardware [None 
req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 549.806529] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 549.806529] env[68674]: DEBUG nova.virt.hardware [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 549.806685] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b65e3c6-6c74-4682-b3bd-ef7ea7e367f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.814765] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6ca37d-dd74-4013-b489-e292a4ea20ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.838053] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d9bf3c-22bf-47c1-bf64-de759f26b29c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.858171] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 549.866976] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 549.867317] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f451b28a-9201-4765-be9a-02a4620c5257 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.880698] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Created folder: OpenStack in parent group-v4. [ 549.880927] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Creating folder: Project (89fd55a1618a488fb7aced246fb3b41f). Parent ref: group-v647377. 
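The nova.virt.hardware messages above (flavor limits 0:0:0, preferred 0:0:0, maximum sockets=cores=threads=65536, and exactly one possible topology of 1:1:1 for a single vCPU) follow from a simple enumeration: every (sockets, cores, threads) combination whose product equals the vCPU count and which stays under the limits is a candidate. The snippet below is a minimal illustrative sketch of that enumeration, not Nova's _get_possible_cpu_topologies implementation; the function name and the brute-force search are assumptions made for clarity.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        # Brute force over divisors of vcpus; fine for the tiny vCPU counts used here.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    # For the m1.nano flavor in this log (vcpus=1) the only candidate is (1, 1, 1),
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))   # [(1, 1, 1)]
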
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 549.881189] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7a1d058-66a4-44e4-90af-d8460e87f2af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.891149] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Created folder: Project (89fd55a1618a488fb7aced246fb3b41f) in parent group-v647377. [ 549.891346] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Creating folder: Instances. Parent ref: group-v647378. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 549.892909] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c280a72-a271-43e1-87b8-ef0065dfd715 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.900321] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Created folder: Instances in parent group-v647378. [ 549.901024] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 549.901024] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 549.901024] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2266955-2110-4f1c-b40b-98549c94b20e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.919553] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 549.919553] env[68674]: value = "task-3239308" [ 549.919553] env[68674]: _type = "Task" [ 549.919553] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.933214] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239308, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.935819] env[68674]: DEBUG nova.scheduler.client.report [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 550.050942] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 550.051209] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 550.433644] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239308, 'name': CreateVM_Task, 'duration_secs': 0.312201} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.433930] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 550.435471] env[68674]: DEBUG oslo_vmware.service [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c9de5e-5c2d-4922-92d1-4ba355cc918c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.443100] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.443383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 550.444126] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 550.445914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 550.445914] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Start building networks asynchronously for instance. 
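The recurring "Acquiring lock ..." / "Lock ... acquired ... waited N s" / "Lock ... released ... held N s" triplets around instance_claim come from oslo.concurrency's lock helpers (the lockutils.py inner/lock functions named in each message), which log this acquire/wait/hold/release lifecycle around a critical section. Below is a minimal sketch of that usage pattern; the lock name reuse and the guarded functions are illustrative only, not the resource tracker's actual code.

    from oslo_concurrency import lockutils

    # Context-manager form: DEBUG lines like the ones in this log are emitted
    # around critical sections of this shape.
    def claim_resources(instance):
        with lockutils.lock("compute_resources"):
            # ... mutate shared per-host resource accounting here ...
            pass

    # Decorator form, equivalent in effect: only one caller at a time may run
    # the function for a given lock name within this process.
    @lockutils.synchronized("compute_resources")
    def update_usage(instance):
        pass
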
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 550.448624] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9d78fe5-b25a-4903-b785-06a1c35485f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.450677] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.401s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 550.452450] env[68674]: INFO nova.compute.claims [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.461307] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 550.461307] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52193a42-783f-4fe0-1352-907b0479b277" [ 550.461307] env[68674]: _type = "Task" [ 550.461307] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.471647] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52193a42-783f-4fe0-1352-907b0479b277, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.555132] env[68674]: DEBUG nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 550.954354] env[68674]: DEBUG nova.compute.utils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 550.957831] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 550.957831] env[68674]: DEBUG nova.network.neutron [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 550.981656] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 550.982144] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 550.982620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.983601] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 550.984375] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 550.985508] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b590673b-428a-4d27-a3dd-9da9bf27f76e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.005578] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 551.005578] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 551.006375] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d6168a-6df9-4f53-9e57-a65643fe57c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.013934] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a512f68-968f-4e69-8877-928871a401ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.019858] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 551.019858] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e6dfd-6182-5bc3-e275-5f4524d371e0" [ 551.019858] env[68674]: _type = "Task" [ 551.019858] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.029524] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e6dfd-6182-5bc3-e275-5f4524d371e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.042607] env[68674]: DEBUG nova.policy [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b6221acd674413abb69f483d19d7fdd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c18d4b8073be4aa59800f2c6f482fdad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 551.092775] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 551.474806] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 551.542333] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 551.542594] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Creating directory with path [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 551.543357] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7394d5cd-0c29-4c25-bfb6-0ffbee506381 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.565392] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Created directory with path [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 551.565807] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Fetch image to [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 551.569236] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Downloading image file data b84d9354-ef6b-46ca-9dae-6549fa89bbea to [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk on the data store datastore1 {{(pid=68674) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 551.569236] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcab523-b55b-4592-ac1a-5171227b18ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.585113] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6329d3c4-44c3-407c-8c97-5ec082cdcf05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.601819] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a60c4a-766f-4263-910a-b0a90907d380 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.607105] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa9c8e4-23de-4c80-8c22-94cb74cd1717 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.650226] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9e70c9-06e1-4fd8-9da5-b7120e4b94a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.660179] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152506a9-e662-4e17-b4e2-7420cd3a8080 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.662232] env[68674]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e734d130-11bb-47e7-9ef3-ea8bbca314e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.697517] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8da5f0a-7112-4a66-9eb9-15b397d27938 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.700515] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Downloading image file data b84d9354-ef6b-46ca-9dae-6549fa89bbea to the data store datastore1 {{(pid=68674) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 551.712420] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83219cc8-ad71-4582-83e1-f3dfe10d9000 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.728497] env[68674]: DEBUG nova.compute.provider_tree [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.906581] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 552.062815] env[68674]: DEBUG nova.network.neutron [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Successfully created port: 27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.234149] env[68674]: DEBUG nova.scheduler.client.report [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 552.487293] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 552.526361] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 552.526361] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.526757] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 552.526757] env[68674]: DEBUG nova.virt.hardware [None 
req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.526823] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 552.526949] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 552.527215] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 552.527497] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 552.527636] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 552.528561] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 552.528561] env[68674]: DEBUG nova.virt.hardware [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 552.529253] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f286350c-b212-4fd4-b349-e045de22c7d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.545818] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f7ec99-e245-4515-8ca3-baca74e62545 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.594316] env[68674]: DEBUG oslo_vmware.rw_handles [None 
req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Completed reading data from the image iterator. {{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 552.594683] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 552.735519] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Downloaded image file data b84d9354-ef6b-46ca-9dae-6549fa89bbea to vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk on the data store datastore1 {{(pid=68674) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 552.738172] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 552.739605] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Copying Virtual Disk [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk to [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 552.739865] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed1153b0-1ad8-458a-8d64-ca2f3e26469d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.745160] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 552.746241] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 552.753077] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.658s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 552.753077] env[68674]: INFO nova.compute.claims [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.764843] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 552.764843] env[68674]: value = "task-3239309" [ 552.764843] env[68674]: _type = "Task" [ 552.764843] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.778033] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.201525] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 553.201525] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 553.254018] env[68674]: DEBUG nova.compute.utils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 553.254818] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 553.257639] env[68674]: DEBUG nova.network.neutron [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 553.285055] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239309, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.366165] env[68674]: DEBUG nova.policy [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1891413e35f845a2b761f474df3eb6c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dceab4b22c34737bc85ee5a5ded00d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 553.703779] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 553.760776] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 553.790229] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656049} completed successfully. 
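Each vCenter operation in this log (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task) follows the same shape: the SOAP call returns a task reference immediately, and the caller polls it until it reports success, logging "progress is N%" along the way. The loop below is a generic, hypothetical stand-in for that wait-and-poll pattern; it is not oslo.vmware's wait_for_task implementation, and get_task_info is an assumed helper.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        """Poll a long-running task until it succeeds or errors.

        get_task_info is an assumed callable returning a dict such as
        {'state': 'running', 'progress': 40} or {'state': 'success'}.
        """
        while True:
            info = get_task_info(task_id)
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "task %s failed" % task_id))
            # Mirrors the "... progress is N%." DEBUG lines seen above.
            print("Task %s progress is %s%%" % (task_id, info.get("progress", 0)))
            time.sleep(poll_interval)
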
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.791311] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Copied Virtual Disk [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk to [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 553.791937] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleting the datastore file [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 553.792432] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93894bc2-83e1-4c49-bc6d-ef60dec81b2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.802355] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 553.802355] env[68674]: value = "task-3239310" [ 553.802355] env[68674]: _type = "Task" [ 553.802355] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.821042] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.891960] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924188df-541a-4246-a7ad-5695cf22f403 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.901763] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca08fa84-2661-49c3-9144-ccd65287d6cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.946682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420f0f40-c2fb-44bb-b122-42a774b2a3ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.955877] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b69f90f-5b7c-4f31-b5cc-9c7dec55f709 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.970597] env[68674]: DEBUG nova.compute.provider_tree [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.231267] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 554.311150] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.021394} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.311570] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 554.311806] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Moving file from [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f/b84d9354-ef6b-46ca-9dae-6549fa89bbea to [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea. 
{{(pid=68674) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 554.312072] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-961539a6-87a9-4afd-ae6c-2597d7e77c7c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.320718] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 554.320718] env[68674]: value = "task-3239311" [ 554.320718] env[68674]: _type = "Task" [ 554.320718] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.331255] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239311, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.473582] env[68674]: DEBUG nova.scheduler.client.report [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 554.697487] env[68674]: DEBUG nova.network.neutron [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Successfully created port: 3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.776876] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Start spawning the instance on the hypervisor. 
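The inventory dictionaries the scheduler report client logs for provider ade3f042-7427-494b-9654-0b65e074850c make the host's capacity explicit. Placement's capacity check is commonly documented as (total - reserved) * allocation_ratio; taking that formula as an assumption (it is stated here for illustration, not read from this log), the provider's numbers work out as follows.

    # Inventory as reported in the log above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inv):
        # Assumed formula: (total - reserved) * allocation_ratio.
        return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # Under the assumed formula: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0,
    # which is why a 1-vCPU / 192 MB m1.nano claim succeeds immediately here.
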
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 554.797110] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 554.797449] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.797570] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 554.797687] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.797822] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 554.797976] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 554.798192] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 554.798348] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 554.798622] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Got 1 possible 
topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 554.798707] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 554.798860] env[68674]: DEBUG nova.virt.hardware [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 554.799761] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49193eca-3a70-415a-b579-54b950901331 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.807931] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a829732f-b2f8-4439-ab96-f7ab2a51c960 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.828949] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239311, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.02729} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.829099] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] File moved {{(pid=68674) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 554.829279] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Cleaning up location [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 554.829461] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleting the datastore file [datastore1] vmware_temp/ccb03591-af1f-45ca-acaa-942f62225d8f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 554.829717] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d72173-b91a-4298-aa1d-78698c2a6714 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.835812] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 554.835812] env[68674]: value = "task-3239312" [ 554.835812] env[68674]: _type = "Task" [ 554.835812] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.843206] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239312, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.979906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 554.980488] env[68674]: DEBUG nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 554.983233] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.753s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 554.984665] env[68674]: INFO nova.compute.claims [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.142037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 555.143439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 555.348554] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026352} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.348878] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 555.349627] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a9dc323-455a-49d2-a9e7-8bf99e629e3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.355494] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 555.355494] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52212717-6628-9a7c-2e71-915b8f54a165" [ 555.355494] env[68674]: _type = "Task" [ 555.355494] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.363365] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52212717-6628-9a7c-2e71-915b8f54a165, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.491113] env[68674]: DEBUG nova.compute.utils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 555.499445] env[68674]: DEBUG nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 555.527121] env[68674]: DEBUG nova.network.neutron [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Successfully updated port: 27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.648382] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 555.769867] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 555.770259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 555.874744] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52212717-6628-9a7c-2e71-915b8f54a165, 'name': SearchDatastore_Task, 'duration_secs': 0.007642} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.875081] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 555.875279] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae/0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 555.876776] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97d26776-0ebc-45f1-b73e-f5b13bc25372 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.885127] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 555.885127] env[68674]: value = "task-3239313" [ 555.885127] env[68674]: _type = "Task" [ 555.885127] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.893539] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239313, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.993974] env[68674]: DEBUG nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 556.030821] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.030981] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquired lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 556.031163] env[68674]: DEBUG nova.network.neutron [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.166443] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d59294c-f037-497f-a726-c774dfc51bb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.183700] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60c1bac-a28e-459e-a7bf-0a0a3efed9c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.194172] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 556.234778] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8762867-6c36-45d6-9a0e-9229c11da74d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.244900] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d246dc1-63f6-47bf-b38c-40ffed6cbc05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.260857] env[68674]: DEBUG nova.compute.provider_tree [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.280216] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 556.399750] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468663} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.403758] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae/0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 556.403947] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 556.404229] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fbaf347-55f0-427f-9824-7691c1c55428 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.411656] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 556.411656] env[68674]: value = "task-3239314" [ 556.411656] env[68674]: _type = "Task" [ 556.411656] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.421469] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239314, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.760177] env[68674]: DEBUG nova.network.neutron [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.767628] env[68674]: DEBUG nova.scheduler.client.report [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 556.814887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 556.925805] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239314, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061613} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.926106] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 556.929700] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a8bb82-be8a-445d-9fde-7796eb28b358 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.954786] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae/0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 556.955143] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1adf9a5d-4017-4d55-9c6f-efc513c4640e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.976351] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 556.976351] env[68674]: value = "task-3239315" [ 556.976351] env[68674]: _type = "Task" [ 556.976351] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.989118] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239315, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.005729] env[68674]: DEBUG nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 557.043389] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 557.044386] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 557.044386] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 557.044549] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 557.044721] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 557.044888] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 557.045328] env[68674]: DEBUG nova.virt.hardware [None 
req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 557.045328] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 557.045509] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 557.045704] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 557.045925] env[68674]: DEBUG nova.virt.hardware [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 557.047102] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27480fa-6708-430b-8193-d465e5200d1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.059204] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db992f34-3b25-4c0f-bea2-2883f237e878 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.076115] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.082865] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Creating folder: Project (ca39c87ee4d7403e99c5e6f735a5ceba). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.083095] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-103f5c4a-f70c-49e8-a8d3-fbd3963adfcf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.094271] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Created folder: Project (ca39c87ee4d7403e99c5e6f735a5ceba) in parent group-v647377. 
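
The hardware records above (Flavor limits 0:0:0, Image limits 0:0:0, Chose sockets=0, cores=0, threads=0, Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]) show Nova enumerating CPU topologies for the 1-vCPU m1.nano flavor with no flavor or image constraints, so only the 1:1:1 layout survives. The sketch below illustrates that enumeration under stated assumptions; it is not nova.virt.hardware code, the helper name possible_topologies is invented for illustration, and the 65536 defaults are copied from the logged limits rather than from Nova's source.

# Illustrative sketch of the topology enumeration traced in the log above.
# NOT nova.virt.hardware; names and defaults here are assumptions.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) combinations whose product equals vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the flavor in the log (vcpus=1, limits 65536:65536:65536) only one
# topology exists, matching "Got 1 possible topologies":
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
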
[ 557.094372] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Creating folder: Instances. Parent ref: group-v647381. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.095164] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46e4cabb-1735-40a0-ae9c-df6a08f58ba4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.103593] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Created folder: Instances in parent group-v647381. [ 557.103726] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 557.104432] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 557.104432] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36bfef11-f628-4016-a6c0-d47c93e45ad3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.120765] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.120765] env[68674]: value = "task-3239318" [ 557.120765] env[68674]: _type = "Task" [ 557.120765] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.128734] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239318, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.277420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 557.278318] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 557.281219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.087s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 557.283318] env[68674]: INFO nova.compute.claims [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.387298] env[68674]: DEBUG nova.network.neutron [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updating instance_info_cache with network_info: [{"id": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "address": "fa:16:3e:55:ec:a9", "network": {"id": "a8c05d42-7059-4056-96f9-f929e862948a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2017205475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c18d4b8073be4aa59800f2c6f482fdad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb7642-85", "ovs_interfaceid": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.490725] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239315, 'name': ReconfigVM_Task, 'duration_secs': 0.343064} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.490725] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae/0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 557.491126] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2191f0ac-f896-42ec-9d92-1fa616e7a5f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.501135] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 557.501135] env[68674]: value = "task-3239319" [ 557.501135] env[68674]: _type = "Task" [ 557.501135] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.512296] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239319, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.520368] env[68674]: DEBUG nova.compute.manager [req-29fbffd7-a240-49a1-bfd7-a9e2d9bc7f13 req-31c6736e-a14d-4241-b1b5-9c3fdf7ab60a service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Received event network-vif-plugged-27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 557.520368] env[68674]: DEBUG oslo_concurrency.lockutils [req-29fbffd7-a240-49a1-bfd7-a9e2d9bc7f13 req-31c6736e-a14d-4241-b1b5-9c3fdf7ab60a service nova] Acquiring lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 557.520368] env[68674]: DEBUG oslo_concurrency.lockutils [req-29fbffd7-a240-49a1-bfd7-a9e2d9bc7f13 req-31c6736e-a14d-4241-b1b5-9c3fdf7ab60a service nova] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 557.520368] env[68674]: DEBUG oslo_concurrency.lockutils [req-29fbffd7-a240-49a1-bfd7-a9e2d9bc7f13 req-31c6736e-a14d-4241-b1b5-9c3fdf7ab60a service nova] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 557.520368] env[68674]: DEBUG nova.compute.manager [req-29fbffd7-a240-49a1-bfd7-a9e2d9bc7f13 req-31c6736e-a14d-4241-b1b5-9c3fdf7ab60a service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] No waiting events found dispatching 
network-vif-plugged-27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 557.520574] env[68674]: WARNING nova.compute.manager [req-29fbffd7-a240-49a1-bfd7-a9e2d9bc7f13 req-31c6736e-a14d-4241-b1b5-9c3fdf7ab60a service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Received unexpected event network-vif-plugged-27bb7642-85fc-46b6-9ac7-8a3a6db3271a for instance with vm_state building and task_state spawning. [ 557.632908] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239318, 'name': CreateVM_Task, 'duration_secs': 0.351014} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.633096] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 557.634129] env[68674]: DEBUG oslo_vmware.service [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf56623-d888-420c-b8e5-967d5ceb336f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.640231] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.640231] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 557.640723] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 557.641372] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62f00fd1-c7ab-4b20-8778-273b2be31b5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.648488] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 557.648488] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bb3788-627f-a7e0-d01f-9e907fa9741c" [ 557.648488] env[68674]: _type = "Task" [ 557.648488] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.659965] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bb3788-627f-a7e0-d01f-9e907fa9741c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.686380] env[68674]: DEBUG nova.network.neutron [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Successfully updated port: 3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 557.791884] env[68674]: DEBUG nova.compute.utils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 557.791884] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.792075] env[68674]: DEBUG nova.network.neutron [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 557.882037] env[68674]: DEBUG nova.policy [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'abfd3423bbdb4ee9a94b31fb0f7aa860', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fa7abd14180453bb12e9dd5fc24523f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 557.890251] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Releasing lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 557.890610] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Instance network_info: |[{"id": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "address": "fa:16:3e:55:ec:a9", "network": {"id": "a8c05d42-7059-4056-96f9-f929e862948a", "bridge": "br-int", "label": 
"tempest-FloatingIPsAssociationNegativeTestJSON-2017205475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c18d4b8073be4aa59800f2c6f482fdad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb7642-85", "ovs_interfaceid": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 557.891490] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:ec:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4c5eb94-841c-4713-985a-8fc4117fbaf1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27bb7642-85fc-46b6-9ac7-8a3a6db3271a', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.899369] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Creating folder: Project (c18d4b8073be4aa59800f2c6f482fdad). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.899678] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c04fb98-fa12-4ce8-83ef-c488a0c04196 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.910553] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Created folder: Project (c18d4b8073be4aa59800f2c6f482fdad) in parent group-v647377. [ 557.910765] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Creating folder: Instances. Parent ref: group-v647384. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.911073] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ea2ec00-666b-44c6-bb5c-fbe348194275 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.920015] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Created folder: Instances in parent group-v647384. [ 557.920257] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 557.920517] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 557.920654] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ece3fb26-48f3-4b24-ac84-078861143d70 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.940515] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.940515] env[68674]: value = "task-3239322" [ 557.940515] env[68674]: _type = "Task" [ 557.940515] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.948510] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239322, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.012877] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239319, 'name': Rename_Task, 'duration_secs': 0.154262} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.013174] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 558.013422] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28d633bd-f12c-466d-a03c-3ad216ebd3d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.019911] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 558.019911] env[68674]: value = "task-3239323" [ 558.019911] env[68674]: _type = "Task" [ 558.019911] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.027941] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239323, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.159699] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 558.160476] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.160476] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.160628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 558.160892] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 558.161254] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-873cc5ca-5fa2-4f08-afbb-71f32668ea7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.172532] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 558.172849] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 558.174105] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117574b4-8cc0-4b59-8745-701627162bd0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.182114] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3d615a3-4c06-4742-98e2-7d76ba2de5c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.188079] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 558.188079] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52da8256-ea33-ff8f-de82-2ab5be1d3e6b" [ 558.188079] env[68674]: _type = "Task" [ 558.188079] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.189037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "refresh_cache-e84db5bd-b6ec-42ef-9c34-a4160c44d973" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.189037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "refresh_cache-e84db5bd-b6ec-42ef-9c34-a4160c44d973" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 558.189037] env[68674]: DEBUG nova.network.neutron [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 558.199677] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52da8256-ea33-ff8f-de82-2ab5be1d3e6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.296339] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 558.454405] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239322, 'name': CreateVM_Task, 'duration_secs': 0.393281} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.454582] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 558.484116] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c153a1-fced-4761-9519-832562bd2824 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.492725] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77edde28-2705-477d-b927-9713f2234d46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.537785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1682ef05-8567-434b-a1be-0de0efb5b6dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.551331] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a8335d-239f-4539-8323-5568dabb176f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.556028] env[68674]: DEBUG oslo_vmware.api [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239323, 'name': PowerOnVM_Task, 'duration_secs': 0.500674} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.556709] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 558.557428] env[68674]: INFO nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Took 8.88 seconds to spawn the instance on the hypervisor. 
[ 558.557684] env[68674]: DEBUG nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 558.559110] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c73d4f-79d6-43f1-9761-ed6f7646fdf4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.571366] env[68674]: DEBUG nova.compute.provider_tree [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.634768] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.634768] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 558.634768] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 558.634768] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7e674ee-cf74-4898-b388-5f453f9bb118 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.643632] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 558.643632] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289b4bc-61e9-7c00-d7e9-ec57139cca47" [ 558.643632] env[68674]: _type = "Task" [ 558.643632] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.655455] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289b4bc-61e9-7c00-d7e9-ec57139cca47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.666028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "160d9aa2-048d-45a2-ab55-581c8721ac3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 558.666238] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 558.701434] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 558.701749] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Creating directory with path [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 558.702079] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5d77b23-2430-4c82-b954-237930de530d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.716200] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Created directory with path [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 558.716200] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Fetch image to [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 558.716492] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Downloading image file data b84d9354-ef6b-46ca-9dae-6549fa89bbea to [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk on the data store datastore2 {{(pid=68674) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 558.717073] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5910caa-149d-4e6f-b4e6-6c219cc35532 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.729304] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd7a4e5-54a3-45e9-bf41-d5172bd89640 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.743116] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9fb48e-7af7-4d3b-8355-5e42b8da258c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.783294] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88318bc9-6136-49fb-b5dd-7b461580f512 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.789694] env[68674]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-68a654ea-37b6-479e-8018-445cac2bc7be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.800954] env[68674]: DEBUG nova.network.neutron [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.822385] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Downloading image file data b84d9354-ef6b-46ca-9dae-6549fa89bbea to the data store datastore2 {{(pid=68674) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 558.899840] env[68674]: DEBUG nova.network.neutron [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Successfully created port: a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 558.908022] env[68674]: DEBUG oslo_vmware.rw_handles [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 559.074642] env[68674]: DEBUG nova.scheduler.client.report [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 559.097669] env[68674]: INFO nova.compute.manager [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Took 13.64 seconds to build instance. [ 559.153438] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289b4bc-61e9-7c00-d7e9-ec57139cca47, 'name': SearchDatastore_Task, 'duration_secs': 0.012618} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.154161] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 559.154478] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 559.154864] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.155117] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 559.155335] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 559.155567] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee0035f3-dedc-44ec-99f5-54b003a588c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.159803] env[68674]: DEBUG nova.network.neutron [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Updating instance_info_cache with network_info: [{"id": "3c8004db-9f93-48c8-9861-4f8e8f1cd2c9", "address": "fa:16:3e:55:c4:43", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c8004db-9f", "ovs_interfaceid": "3c8004db-9f93-48c8-9861-4f8e8f1cd2c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.166019] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 559.166019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 559.169787] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9e25d5-dca6-4819-a147-32b3199df995 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.169787] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 559.176035] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 559.176035] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521f8dfe-01d9-afd0-0f00-bc6497bc7728" [ 559.176035] env[68674]: _type = "Task" [ 559.176035] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.185750] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521f8dfe-01d9-afd0-0f00-bc6497bc7728, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.314140] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 559.353250] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 559.353925] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.353925] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 559.353925] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.354279] env[68674]: DEBUG nova.virt.hardware [None 
req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 559.354383] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 559.354707] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 559.354930] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 559.355118] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 559.355295] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 559.355509] env[68674]: DEBUG nova.virt.hardware [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 559.356682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20826499-7fa7-407c-9b06-8a4dea35cfc9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.369406] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a359c9c4-48b5-4c66-a142-b749217beb37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.392539] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.392725] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.392995] env[68674]: DEBUG 
oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.393720] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.393720] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.394425] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.394425] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.394425] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 559.394425] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.589037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.308s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 559.589830] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 559.595475] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.779s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 559.595691] env[68674]: INFO nova.compute.claims [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 559.600140] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6e731c1e-74ec-41e4-822d-b86630a97384 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.150s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 559.666264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "refresh_cache-e84db5bd-b6ec-42ef-9c34-a4160c44d973" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 559.666600] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Instance network_info: |[{"id": "3c8004db-9f93-48c8-9861-4f8e8f1cd2c9", "address": "fa:16:3e:55:c4:43", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c8004db-9f", "ovs_interfaceid": "3c8004db-9f93-48c8-9861-4f8e8f1cd2c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 559.667017] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:c4:43', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c8004db-9f93-48c8-9861-4f8e8f1cd2c9', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 559.675344] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating folder: Project (3dceab4b22c34737bc85ee5a5ded00d3). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 559.680501] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efa3da01-4b6f-4fb8-a6ce-1370c0b0e358 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.697192] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521f8dfe-01d9-afd0-0f00-bc6497bc7728, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.699450] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created folder: Project (3dceab4b22c34737bc85ee5a5ded00d3) in parent group-v647377. [ 559.699624] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating folder: Instances. Parent ref: group-v647387. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 559.700846] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 559.701576] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-052c5cf3-643d-4873-94e7-9eda3441c101 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.704251] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eeea5607-7a1c-4077-b07d-47e443b28518 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.708956] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 559.708956] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b6f4a2-e5a5-741c-ff8e-68d508090c7b" [ 559.708956] env[68674]: _type = "Task" [ 559.708956] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.714964] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created folder: Instances in parent group-v647387. [ 559.715243] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 559.715865] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 559.716019] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ce14fef-1781-45ab-bda1-03a257bc272f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.738277] env[68674]: DEBUG oslo_vmware.rw_handles [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Completed reading data from the image iterator. {{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 559.738277] env[68674]: DEBUG oslo_vmware.rw_handles [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 559.742599] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b6f4a2-e5a5-741c-ff8e-68d508090c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.749775] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 559.749775] env[68674]: value = "task-3239326" [ 559.749775] env[68674]: _type = "Task" [ 559.749775] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.757329] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239326, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.781305] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Downloaded image file data b84d9354-ef6b-46ca-9dae-6549fa89bbea to vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk on the data store datastore2 {{(pid=68674) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 559.783295] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 559.783690] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Copying Virtual Disk [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk to [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 559.783838] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a1dfc2a-b3c5-4d5b-9d43-f795ad037a0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.793799] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 559.793799] env[68674]: value = "task-3239327" [ 559.793799] env[68674]: _type = "Task" [ 559.793799] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.803250] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.905220] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 560.098078] env[68674]: DEBUG nova.compute.utils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 560.099736] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 560.099970] env[68674]: DEBUG nova.network.neutron [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 560.185252] env[68674]: DEBUG nova.policy [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'abfd3423bbdb4ee9a94b31fb0f7aa860', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fa7abd14180453bb12e9dd5fc24523f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 560.224786] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b6f4a2-e5a5-741c-ff8e-68d508090c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.011775} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.225087] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 560.225430] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 58830b0e-dbf3-424d-8b9a-bb298b6bea21/58830b0e-dbf3-424d-8b9a-bb298b6bea21.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 560.225707] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc4f1ca8-06c4-4939-a01c-5bbb1dc0507a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.234198] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 560.234198] env[68674]: value = "task-3239328" [ 560.234198] env[68674]: _type = "Task" [ 560.234198] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.242312] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 560.243212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 560.247468] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239328, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.258437] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239326, 'name': CreateVM_Task, 'duration_secs': 0.42411} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.258621] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 560.259325] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.259527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 560.259879] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 560.260147] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1044d8d5-69be-4851-a4c8-73a744821d1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.265356] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] 
Waiting for the task: (returnval){ [ 560.265356] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521209bf-78ec-c940-ab62-858a704d7dfe" [ 560.265356] env[68674]: _type = "Task" [ 560.265356] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.275079] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521209bf-78ec-c940-ab62-858a704d7dfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.303526] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239327, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.487748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 560.487748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 560.600586] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 560.748392] env[68674]: DEBUG nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 560.754476] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239328, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.782887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 560.783591] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.783953] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.802921] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4ae6ef-62bb-4d30-bd17-03618fa06e07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.817840] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669635} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.818269] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Copied Virtual Disk [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk to [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 560.818459] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleting the datastore file [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea/tmp-sparse.vmdk {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 560.819642] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3a2076-e92c-4a2f-b790-b0b8bd7b9cd9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.825702] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07206ca5-dee6-4ff7-9563-1c6e195fc9f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.866908] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ba0b24-7be9-469b-8bb9-6e405621bb58 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.870405] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 560.870405] env[68674]: value = "task-3239329" [ 560.870405] env[68674]: _type = "Task" [ 560.870405] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.879610] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652b3dd6-bcfb-4051-8f10-fc8f2c8a1248 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.888915] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044743} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.889926] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.890279] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Moving file from [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c/b84d9354-ef6b-46ca-9dae-6549fa89bbea to [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea. {{(pid=68674) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 560.890357] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-424cef90-646c-47c3-814a-2294e7600caf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.902275] env[68674]: DEBUG nova.compute.provider_tree [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 560.908360] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 560.908360] env[68674]: value = "task-3239330" [ 560.908360] env[68674]: _type = "Task" [ 560.908360] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.916167] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239330, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.989864] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 561.150231] env[68674]: DEBUG nova.network.neutron [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Successfully created port: 38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 561.249257] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668563} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.249257] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 58830b0e-dbf3-424d-8b9a-bb298b6bea21/58830b0e-dbf3-424d-8b9a-bb298b6bea21.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 561.249257] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 561.249257] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26fd66f8-0769-4791-b8a0-21a758516919 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.258625] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 561.258625] env[68674]: value = "task-3239331" [ 561.258625] env[68674]: _type = "Task" [ 561.258625] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.272502] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239331, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.288675] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 561.421082] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239330, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027419} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.421231] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] File moved {{(pid=68674) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 561.421397] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Cleaning up location [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 561.421566] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleting the datastore file [datastore2] vmware_temp/58f17604-1abc-424c-89e5-a76eab5adf8c {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 561.421884] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf22cb83-c3f1-4a4d-957d-2383df86dfdc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.431314] env[68674]: ERROR nova.scheduler.client.report [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [req-3b49bf51-a310-4e84-ac7d-66ca93040839] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3b49bf51-a310-4e84-ac7d-66ca93040839"}]} [ 561.434959] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 561.434959] env[68674]: value = "task-3239332" [ 561.434959] env[68674]: _type = "Task" [ 561.434959] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.446425] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.524026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 561.541639] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 561.541639] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 561.543499] env[68674]: DEBUG nova.scheduler.client.report [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 561.574737] env[68674]: DEBUG nova.scheduler.client.report [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 561.574737] env[68674]: DEBUG nova.compute.provider_tree [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 121, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 561.587546] env[68674]: DEBUG nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Received event network-changed-27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 561.587546] env[68674]: DEBUG nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Refreshing instance network info cache due to event network-changed-27bb7642-85fc-46b6-9ac7-8a3a6db3271a. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 561.587546] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Acquiring lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.587546] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Acquired lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 561.587997] env[68674]: DEBUG nova.network.neutron [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Refreshing network info cache for port 27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 561.594115] env[68674]: DEBUG nova.scheduler.client.report [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 561.614495] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 561.628570] env[68674]: DEBUG nova.scheduler.client.report [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 561.640752] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 561.641035] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.641191] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 561.641371] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.642087] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 561.642087] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 561.642087] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 561.642087] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 561.642410] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 561.642410] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 561.642508] env[68674]: DEBUG nova.virt.hardware [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 561.644299] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54d70fa-3fc4-424a-aa73-96093ba93703 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.654935] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8588aee5-dcc1-496a-8172-c6adaec4bb24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.773896] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075209} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.779649] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 561.781693] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156fd648-9f0f-40db-b9c9-6f5e54906347 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.810823] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 58830b0e-dbf3-424d-8b9a-bb298b6bea21/58830b0e-dbf3-424d-8b9a-bb298b6bea21.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 561.813447] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d71573d6-5ec6-4bf6-aca6-fad45bb262a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.836926] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 561.836926] env[68674]: value = "task-3239333" [ 561.836926] env[68674]: _type = "Task" [ 561.836926] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.846653] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239333, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.855452] env[68674]: DEBUG nova.network.neutron [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Successfully updated port: a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 561.910359] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165ddd88-6044-47f9-8d83-06b19bce08fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.915357] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be3c7f5-ecc0-4a62-be17-c175c237614d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.965286] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c866e43-ae23-45d2-a76a-9be7f8f694cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.982852] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027532} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.987123] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc001a1-d01c-4463-b825-b7f0ffdb8730 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.988492] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 561.989280] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f50f4e-db7a-4833-b0cb-d40dbb3aa5cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.004872] env[68674]: DEBUG nova.compute.provider_tree [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 562.007581] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 562.007581] env[68674]: value = 
"session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52de8215-dddd-c6a0-8aaa-b5ed00eedbe4" [ 562.007581] env[68674]: _type = "Task" [ 562.007581] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.018675] env[68674]: DEBUG nova.compute.manager [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 562.019056] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52de8215-dddd-c6a0-8aaa-b5ed00eedbe4, 'name': SearchDatastore_Task, 'duration_secs': 0.010681} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.020463] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55edb833-45c6-4a05-8365-2d6c13d6904a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.025939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 562.026274] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.026923] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 562.027165] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 562.027487] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ba62db8-da98-411f-85d8-7e6a805d7fbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.029932] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95254ca4-8655-4988-ac23-3003cf976d17 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.039330] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 562.039330] env[68674]: value = "task-3239334" [ 562.039330] env[68674]: _type = "Task" [ 562.039330] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.050256] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 562.052909] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.062419] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 562.062603] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 562.063632] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-094989f8-9884-4f9f-a1a3-c33c757bc8b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.072209] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 562.072209] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5224fb9f-798d-9c76-5755-838c02dadc60" [ 562.072209] env[68674]: _type = "Task" [ 562.072209] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.086135] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5224fb9f-798d-9c76-5755-838c02dadc60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.351886] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239333, 'name': ReconfigVM_Task, 'duration_secs': 0.341846} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.352219] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 58830b0e-dbf3-424d-8b9a-bb298b6bea21/58830b0e-dbf3-424d-8b9a-bb298b6bea21.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 562.352875] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05bad1f2-7cd4-4837-9cb1-b870d3c7baa2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.362935] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "refresh_cache-baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.363216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "refresh_cache-baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 562.363445] env[68674]: DEBUG nova.network.neutron [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.370164] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 562.370164] env[68674]: value = "task-3239335" [ 562.370164] env[68674]: _type = "Task" [ 562.370164] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.380675] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239335, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.538842] env[68674]: INFO nova.compute.manager [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] instance snapshotting [ 562.539563] env[68674]: DEBUG nova.objects.instance [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lazy-loading 'flavor' on Instance uuid 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 562.553199] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239334, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.564536] env[68674]: DEBUG nova.scheduler.client.report [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 15 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 562.564809] env[68674]: DEBUG nova.compute.provider_tree [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 15 to 16 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 562.565364] env[68674]: DEBUG nova.compute.provider_tree [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 562.591872] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 562.592217] env[68674]: DEBUG oslo_vmware.api [None 
req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5224fb9f-798d-9c76-5755-838c02dadc60, 'name': SearchDatastore_Task, 'duration_secs': 0.009602} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.593584] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b685395a-cc49-4291-9194-72884ac1d363 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.602078] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 562.602078] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cbb795-4510-f393-d0d1-995797084ea4" [ 562.602078] env[68674]: _type = "Task" [ 562.602078] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.612100] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cbb795-4510-f393-d0d1-995797084ea4, 'name': SearchDatastore_Task, 'duration_secs': 0.008658} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.612369] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 562.612623] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e84db5bd-b6ec-42ef-9c34-a4160c44d973/e84db5bd-b6ec-42ef-9c34-a4160c44d973.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.612878] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a73dc236-fe8c-4d9f-aa9b-176ce0f93c15 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.619948] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 562.619948] env[68674]: value = "task-3239336" [ 562.619948] env[68674]: _type = "Task" [ 562.619948] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.632309] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239336, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.817239] env[68674]: DEBUG nova.network.neutron [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updated VIF entry in instance network info cache for port 27bb7642-85fc-46b6-9ac7-8a3a6db3271a. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 562.817732] env[68674]: DEBUG nova.network.neutron [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updating instance_info_cache with network_info: [{"id": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "address": "fa:16:3e:55:ec:a9", "network": {"id": "a8c05d42-7059-4056-96f9-f929e862948a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2017205475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c18d4b8073be4aa59800f2c6f482fdad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb7642-85", "ovs_interfaceid": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.883169] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239335, 'name': Rename_Task, 'duration_secs': 0.265663} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.883656] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 562.884085] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08f238ed-7bd6-4f38-9119-6dba7692bc2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.893950] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 562.893950] env[68674]: value = "task-3239337" [ 562.893950] env[68674]: _type = "Task" [ 562.893950] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.905572] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.013866] env[68674]: DEBUG nova.network.neutron [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.050288] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0610a4d3-8877-4ae8-b8f0-2b2bd7f15ff1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.056714] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547477} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.057618] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 563.060339] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 563.060339] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7b494b9-0763-44e0-a289-a63334f5e5ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.077019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.482s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 563.077019] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 563.082378] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.381s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 563.084178] env[68674]: INFO nova.compute.claims [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.087384] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c18a147-afff-4787-b9ff-a81772567a4e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.095443] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 563.095443] env[68674]: value = "task-3239338" [ 563.095443] env[68674]: _type = "Task" [ 563.095443] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.114463] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.133745] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239336, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48451} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.134279] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e84db5bd-b6ec-42ef-9c34-a4160c44d973/e84db5bd-b6ec-42ef-9c34-a4160c44d973.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 563.138049] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 563.138049] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9af85079-af03-48ec-9a95-408a7ec7cbf4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.148666] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 563.148666] env[68674]: value = "task-3239339" [ 563.148666] env[68674]: _type = "Task" [ 563.148666] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.164961] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239339, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.321097] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Releasing lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 563.321275] env[68674]: DEBUG nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Received event network-vif-plugged-3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 563.321480] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Acquiring lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.321706] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 563.321989] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 563.322100] env[68674]: DEBUG nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] No waiting events found dispatching network-vif-plugged-3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 563.322709] env[68674]: WARNING nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Received unexpected event network-vif-plugged-3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 for instance with vm_state building and task_state spawning. [ 563.322709] env[68674]: DEBUG nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Received event network-changed-3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 563.322709] env[68674]: DEBUG nova.compute.manager [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Refreshing instance network info cache due to event network-changed-3c8004db-9f93-48c8-9861-4f8e8f1cd2c9. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 563.322855] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Acquiring lock "refresh_cache-e84db5bd-b6ec-42ef-9c34-a4160c44d973" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.322887] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Acquired lock "refresh_cache-e84db5bd-b6ec-42ef-9c34-a4160c44d973" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 563.323562] env[68674]: DEBUG nova.network.neutron [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Refreshing network info cache for port 3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 563.407814] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239337, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.455404] env[68674]: DEBUG nova.network.neutron [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Updating instance_info_cache with network_info: [{"id": "a459b31b-865e-45d7-a62b-b7c95eb50c15", "address": "fa:16:3e:0c:f2:5f", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa459b31b-86", "ovs_interfaceid": "a459b31b-865e-45d7-a62b-b7c95eb50c15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.598568] env[68674]: DEBUG nova.compute.utils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 563.600841] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 
tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 563.600929] env[68674]: DEBUG nova.network.neutron [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.616565] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 563.616890] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136493} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.617967] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b4923a67-0186-427d-9cb4-487c6ef7f0f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.621349] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.623090] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db065530-a273-41e0-803f-3e25610ac841 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.649730] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.654420] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6bc45c1-4456-426f-80f9-8db2dab3d1e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.678582] env[68674]: DEBUG oslo_vmware.api [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 563.678582] env[68674]: value = "task-3239340" [ 563.678582] env[68674]: _type = "Task" [ 563.678582] env[68674]: } to complete. 
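The 'Waiting for the task: (returnval){ value = "task-..." } to complete' and "Task: {...} progress is N%" entries above and below are oslo.vmware's wait_for_task/_poll_task loop (oslo_vmware/api.py, as the trailers show): the driver submits a vCenter task such as ExtendVirtualDisk_Task, CreateSnapshot_Task or ReconfigVM_Task and then polls it until it reaches a terminal state. A minimal, self-contained sketch of that polling pattern follows; poll_vcenter_task, get_task_info and TaskInfo are illustrative stand-ins, not the actual oslo.vmware API.

# Illustrative sketch only -- not the oslo.vmware implementation.
import time
from collections import namedtuple

TaskInfo = namedtuple("TaskInfo", "state progress error")  # state: "queued", "running", "success", "error"

def poll_vcenter_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    # Block until the vCenter task reaches a terminal state, mirroring the
    # "progress is N%" / "completed successfully" entries in the log.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # one property-collector round trip per poll
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        print(f"Task {task_id} ({info.state}) progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")

# usage: poll_vcenter_task(lambda t: TaskInfo("success", 100, None), "task-3239339")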
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.691351] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 563.691351] env[68674]: value = "task-3239341" [ 563.691351] env[68674]: _type = "Task" [ 563.691351] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.700551] env[68674]: DEBUG oslo_vmware.api [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239340, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.701460] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239339, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108581} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.705512] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.706589] env[68674]: DEBUG nova.policy [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '666d0d2bb76a41948cdb6df7b700429d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be6579f30b2c418c98a5a373176baf05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 563.708472] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d2a604-3ec5-4f22-8976-f08f645c9ff7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.737773] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] e84db5bd-b6ec-42ef-9c34-a4160c44d973/e84db5bd-b6ec-42ef-9c34-a4160c44d973.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.742397] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9c50e06-3b43-471d-bfb2-acffef625a15 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.761392] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac 
tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.768692] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 563.768692] env[68674]: value = "task-3239342" [ 563.768692] env[68674]: _type = "Task" [ 563.768692] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.779700] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239342, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.802422] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.802763] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 563.803092] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.803482] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 563.803727] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 563.809419] env[68674]: INFO nova.compute.manager [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 
0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Terminating instance [ 563.907916] env[68674]: DEBUG oslo_vmware.api [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239337, 'name': PowerOnVM_Task, 'duration_secs': 0.88845} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.907916] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 563.907916] env[68674]: INFO nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Took 11.42 seconds to spawn the instance on the hypervisor. [ 563.907916] env[68674]: DEBUG nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 563.908372] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9daf91de-6bc6-4f2d-918d-329ff166f6aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.959090] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "refresh_cache-baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 563.959090] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance network_info: |[{"id": "a459b31b-865e-45d7-a62b-b7c95eb50c15", "address": "fa:16:3e:0c:f2:5f", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa459b31b-86", "ovs_interfaceid": "a459b31b-865e-45d7-a62b-b7c95eb50c15", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 563.959445] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:f2:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a459b31b-865e-45d7-a62b-b7c95eb50c15', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 563.967427] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating folder: Project (3fa7abd14180453bb12e9dd5fc24523f). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 563.968110] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd88c617-ff4b-4bdf-9ca7-5f14ca8acc38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.986186] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created folder: Project (3fa7abd14180453bb12e9dd5fc24523f) in parent group-v647377. [ 563.986585] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating folder: Instances. Parent ref: group-v647390. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 563.987199] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2147766d-0b95-44b8-bdc9-8483b53c66cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.003053] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created folder: Instances in parent group-v647390. [ 564.004031] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 564.004031] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 564.004031] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57132ef2-9edc-4f1f-ae30-b108cde731df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.028962] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 564.028962] env[68674]: value = "task-3239345" [ 564.028962] env[68674]: _type = "Task" [ 564.028962] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.039851] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239345, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.113217] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 564.207400] env[68674]: DEBUG oslo_vmware.api [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239340, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.222862] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239341, 'name': ReconfigVM_Task, 'duration_secs': 0.418181} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.223266] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 564.224097] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c7f590b-a39c-468f-b7ff-b195ad0fda8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.238809] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 564.238809] env[68674]: value = "task-3239346" [ 564.238809] env[68674]: _type = "Task" [ 564.238809] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.253195] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239346, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.296319] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.310147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "refresh_cache-0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.310579] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquired lock "refresh_cache-0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 564.310579] env[68674]: DEBUG nova.network.neutron [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.348918] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1d2fb1-d13d-4d96-abe9-1ffd1bed3f17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.359255] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4124f0-888f-470e-b62f-15af7f2b4748 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.395604] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7784383a-7ac2-43f0-933d-ef39c375b3cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.405550] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381ff397-b3b5-402d-a01d-e56e6a19cf8e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.432091] env[68674]: DEBUG nova.compute.provider_tree [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.438027] env[68674]: INFO nova.compute.manager [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 
tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Took 16.19 seconds to build instance. [ 564.438506] env[68674]: DEBUG nova.network.neutron [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Successfully updated port: 38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 564.539215] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239345, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.699319] env[68674]: DEBUG oslo_vmware.api [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239340, 'name': CreateSnapshot_Task, 'duration_secs': 1.032546} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.699319] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 564.700314] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191bf13d-82c6-45a9-8d14-7343ef5c6852 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.756600] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239346, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.788208] env[68674]: DEBUG nova.network.neutron [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Updated VIF entry in instance network info cache for port 3c8004db-9f93-48c8-9861-4f8e8f1cd2c9. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 564.788571] env[68674]: DEBUG nova.network.neutron [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Updating instance_info_cache with network_info: [{"id": "3c8004db-9f93-48c8-9861-4f8e8f1cd2c9", "address": "fa:16:3e:55:c4:43", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c8004db-9f", "ovs_interfaceid": "3c8004db-9f93-48c8-9861-4f8e8f1cd2c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.792846] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239342, 'name': ReconfigVM_Task, 'duration_secs': 0.662296} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.793281] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Reconfigured VM instance instance-00000003 to attach disk [datastore2] e84db5bd-b6ec-42ef-9c34-a4160c44d973/e84db5bd-b6ec-42ef-9c34-a4160c44d973.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 564.793892] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d136a67-047c-41d9-9e37-5300f2d3330a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.803231] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 564.803231] env[68674]: value = "task-3239347" [ 564.803231] env[68674]: _type = "Task" [ 564.803231] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.815082] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239347, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.869321] env[68674]: DEBUG nova.network.neutron [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.936487] env[68674]: DEBUG nova.scheduler.client.report [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 564.940965] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7aff0618-370f-42b6-9afc-d885c4b9d607 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.704s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 564.942653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "refresh_cache-02d4aee3-7267-4658-a277-8a9a00dd9f6e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.942653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "refresh_cache-02d4aee3-7267-4658-a277-8a9a00dd9f6e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 564.942653] env[68674]: DEBUG nova.network.neutron [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.044267] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239345, 'name': CreateVM_Task, 'duration_secs': 0.608892} completed successfully. 
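The "Inventory has not changed for provider ade3f042-... based on inventory data: {...}" entry above is the resource tracker comparing its local view against Placement's inventory records (total, reserved, min_unit, max_unit, step_size, allocation_ratio per resource class). The schedulable capacity Placement derives from such a record is conventionally (total - reserved) * allocation_ratio; the sketch below just reproduces that arithmetic with the exact figures from the log, as an illustration rather than Nova or Placement source code.

# Sketch: derive schedulable capacity from the inventory dump above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 120},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable capacity = {capacity:.0f}, per-instance cap (max_unit) = {inv['max_unit']}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400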
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.044506] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 565.046892] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.046892] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 565.047190] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 565.047443] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28bced3f-fdf1-4679-be5b-7507ed2810ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.056258] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 565.056258] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ea9470-df91-d033-eab5-c96071277330" [ 565.056258] env[68674]: _type = "Task" [ 565.056258] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.068133] env[68674]: DEBUG nova.compute.manager [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Received event network-vif-plugged-a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 565.068377] env[68674]: DEBUG oslo_concurrency.lockutils [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] Acquiring lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 565.069013] env[68674]: DEBUG oslo_concurrency.lockutils [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 565.069013] env[68674]: DEBUG oslo_concurrency.lockutils [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 565.069013] env[68674]: DEBUG nova.compute.manager [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] No waiting events found dispatching network-vif-plugged-a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 565.069159] env[68674]: WARNING nova.compute.manager [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Received unexpected event network-vif-plugged-a459b31b-865e-45d7-a62b-b7c95eb50c15 for instance with vm_state building and task_state spawning. [ 565.069248] env[68674]: DEBUG nova.compute.manager [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Received event network-changed-a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 565.069372] env[68674]: DEBUG nova.compute.manager [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Refreshing instance network info cache due to event network-changed-a459b31b-865e-45d7-a62b-b7c95eb50c15. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 565.069902] env[68674]: DEBUG oslo_concurrency.lockutils [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] Acquiring lock "refresh_cache-baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.069902] env[68674]: DEBUG oslo_concurrency.lockutils [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] Acquired lock "refresh_cache-baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 565.069902] env[68674]: DEBUG nova.network.neutron [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Refreshing network info cache for port a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 565.087471] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ea9470-df91-d033-eab5-c96071277330, 'name': SearchDatastore_Task, 'duration_secs': 0.013503} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.088657] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 565.088657] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 565.088818] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.088969] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 565.089159] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 565.089712] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-491b6916-2951-46a8-b828-9c066b83a2db {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.103846] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 565.103846] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 565.104940] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-184358ea-60fe-4852-8633-f7f6427ac3e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.114209] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 565.114209] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52848b6a-9bf8-980c-4cf6-11b509394890" [ 565.114209] env[68674]: _type = "Task" [ 565.114209] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.121998] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 565.128404] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52848b6a-9bf8-980c-4cf6-11b509394890, 'name': SearchDatastore_Task, 'duration_secs': 0.011148} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.129229] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb62b85d-b499-40ee-b655-ba53968a4e45 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.135663] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 565.135663] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5253d974-5fe5-dfcb-f1c3-f92806611ea4" [ 565.135663] env[68674]: _type = "Task" [ 565.135663] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.148536] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5253d974-5fe5-dfcb-f1c3-f92806611ea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.176835] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 565.180374] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.180665] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 565.180665] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.180866] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 565.180992] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 565.181220] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 565.181382] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 565.181553] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 565.182031] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 565.182031] env[68674]: DEBUG nova.virt.hardware [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 565.182827] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fe6731-5be9-435f-b445-18a47e17d781 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.193949] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257c94f9-9627-4778-9327-744d6b005dbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.216313] env[68674]: DEBUG nova.compute.manager [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Instance disappeared during snapshot {{(pid=68674) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 565.223697] env[68674]: DEBUG nova.network.neutron [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.263398] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239346, 'name': Rename_Task, 'duration_secs': 0.94715} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.267480] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 565.269548] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-149bb001-b35d-4b7a-8fb2-ef6586350c70 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.278405] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 565.278405] env[68674]: value = "task-3239348" [ 565.278405] env[68674]: _type = "Task" [ 565.278405] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.288479] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.295062] env[68674]: DEBUG nova.network.neutron [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Successfully created port: 81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.295062] env[68674]: DEBUG oslo_concurrency.lockutils [req-236578b3-bc95-476f-988e-a711f7a0309e req-06bf9047-8c4d-48e3-92ac-36f3b5163238 service nova] Releasing lock "refresh_cache-e84db5bd-b6ec-42ef-9c34-a4160c44d973" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 565.316708] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239347, 'name': Rename_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.425809] env[68674]: DEBUG nova.compute.manager [None req-236b4be9-9a0e-4868-aef2-ffc1d2e5f890 tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Found 0 images (rotation: 2) {{(pid=68674) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 565.444597] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 565.445204] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 565.452553] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.555s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 565.452736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 565.452960] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 565.453332] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.165s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 565.454901] env[68674]: INFO nova.compute.claims [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.459396] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84991d3-dd3c-42e5-ab41-9b437b2441b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.472763] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1a1c47-e5ea-4a58-995b-2039cc280f7a {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.492400] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fd301c-051b-4317-9a67-eb34111addc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.501287] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523ce481-c6dc-48ec-9849-5462a99319c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.536931] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181038MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 565.537108] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 565.554216] env[68674]: DEBUG nova.network.neutron [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.651225] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5253d974-5fe5-dfcb-f1c3-f92806611ea4, 'name': SearchDatastore_Task, 'duration_secs': 0.017628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.651489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 565.653034] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 565.653034] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-667247a8-b2d9-4032-a893-845731faa751 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.662576] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 565.662576] env[68674]: value = "task-3239349" [ 565.662576] env[68674]: _type = "Task" [ 565.662576] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.680363] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239349, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.727845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Releasing lock "refresh_cache-0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 565.727845] env[68674]: DEBUG nova.compute.manager [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 565.727845] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 565.732682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257f44d1-52de-4bfd-8444-071e62de823a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.744118] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 565.744416] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e1d65fb-ab24-46b7-81d1-4aa1f50b2927 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.754995] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 565.754995] env[68674]: value = "task-3239350" [ 565.754995] env[68674]: _type = "Task" [ 565.754995] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.769148] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.792846] env[68674]: DEBUG oslo_vmware.api [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239348, 'name': PowerOnVM_Task, 'duration_secs': 0.444423} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.793147] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 565.793342] env[68674]: INFO nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Took 8.79 seconds to spawn the instance on the hypervisor. 
[ 565.793562] env[68674]: DEBUG nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 565.794313] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3ed1ff-a4a1-410f-a65c-959b1e8aa8fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.819311] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239347, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.963479] env[68674]: DEBUG nova.compute.utils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 565.965070] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 565.965200] env[68674]: DEBUG nova.network.neutron [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 566.175622] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239349, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.183822] env[68674]: DEBUG nova.policy [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '666d0d2bb76a41948cdb6df7b700429d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be6579f30b2c418c98a5a373176baf05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 566.270254] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239350, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.271431] env[68674]: DEBUG nova.network.neutron [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Updating instance_info_cache with network_info: [{"id": "38369d37-449e-4f62-940b-9700d870d8c6", "address": "fa:16:3e:36:cb:68", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38369d37-44", "ovs_interfaceid": "38369d37-449e-4f62-940b-9700d870d8c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.321717] env[68674]: INFO nova.compute.manager [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Took 15.26 seconds to build instance. [ 566.331813] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239347, 'name': Rename_Task, 'duration_secs': 1.347827} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.332604] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 566.332919] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95faf9e0-770b-4499-8b69-a4b701637586 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.344215] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 566.344215] env[68674]: value = "task-3239351" [ 566.344215] env[68674]: _type = "Task" [ 566.344215] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.357212] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239351, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.472877] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 566.635824] env[68674]: DEBUG nova.network.neutron [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Updated VIF entry in instance network info cache for port a459b31b-865e-45d7-a62b-b7c95eb50c15. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 566.635824] env[68674]: DEBUG nova.network.neutron [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Updating instance_info_cache with network_info: [{"id": "a459b31b-865e-45d7-a62b-b7c95eb50c15", "address": "fa:16:3e:0c:f2:5f", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa459b31b-86", "ovs_interfaceid": "a459b31b-865e-45d7-a62b-b7c95eb50c15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.684226] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239349, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.830583} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.699465] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 566.700125] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 566.701164] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 566.701561] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 566.703352] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e635a5f2-ec05-4e33-8ce9-2210944a1748 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.714526] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 566.714526] env[68674]: value = "task-3239352" [ 566.714526] env[68674]: _type = "Task" [ 566.714526] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.730280] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239352, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.775743] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "refresh_cache-02d4aee3-7267-4658-a277-8a9a00dd9f6e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 566.776159] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Instance network_info: |[{"id": "38369d37-449e-4f62-940b-9700d870d8c6", "address": "fa:16:3e:36:cb:68", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38369d37-44", "ovs_interfaceid": "38369d37-449e-4f62-940b-9700d870d8c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 566.776364] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239350, 'name': PowerOffVM_Task, 'duration_secs': 0.539925} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.776804] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:cb:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38369d37-449e-4f62-940b-9700d870d8c6', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 566.787075] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 566.787512] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 566.787851] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 566.791821] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 566.792017] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ab62a65-e7a8-4eca-bceb-aefd3e40de4b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.795235] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3395a00-0435-4177-8bca-c0ad69136e39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.818142] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 566.818142] env[68674]: value = "task-3239354" [ 566.818142] env[68674]: _type = "Task" [ 566.818142] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.828433] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239354, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.830303] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3312e73e-3f25-44c0-91a1-cdbfb417eb1a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.833450] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ef2929d2-0594-4b76-9713-8cfd3c92b4ac tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.782s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 566.839848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93db95ac-c7c0-45bf-8558-166a030e17e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.850258] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 566.850258] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 566.850258] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleting the datastore file [datastore1] 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 566.875828] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8337d9b1-8434-46bd-bf52-9cb43dd00a79 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.882495] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bca646a-3429-4546-8320-7644605c8d23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.885397] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239351, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.891939] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3318a2-a4ee-4477-966f-e428d9338fc9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.899355] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for the task: (returnval){ [ 566.899355] env[68674]: value = "task-3239355" [ 566.899355] env[68674]: _type = "Task" [ 566.899355] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.914238] env[68674]: DEBUG nova.compute.provider_tree [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.921893] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.143217] env[68674]: DEBUG oslo_concurrency.lockutils [req-3cf0b5ed-80da-4b61-998d-eb56a857e902 req-8e1efb97-6a2a-47a3-b3d2-709db5507bb7 service nova] Releasing lock "refresh_cache-baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 567.208440] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 567.228667] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069413} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.228667] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 567.229509] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280769a8-b118-4297-812a-981bb98b7ace {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.262967] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 567.264167] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0c991b9-f184-4567-a507-49273ca89193 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.298659] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 567.298659] env[68674]: value = "task-3239356" [ 567.298659] env[68674]: _type = "Task" [ 567.298659] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.310647] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.330068] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239354, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.357248] env[68674]: DEBUG oslo_vmware.api [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239351, 'name': PowerOnVM_Task, 'duration_secs': 0.561962} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.357755] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 567.357976] env[68674]: INFO nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Took 12.58 seconds to spawn the instance on the hypervisor. [ 567.358179] env[68674]: DEBUG nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 567.359218] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4a6901-9f38-489c-a9d1-8d104e1142e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.412233] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.420019] env[68674]: DEBUG nova.scheduler.client.report [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 567.453479] env[68674]: DEBUG nova.network.neutron [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Successfully created port: 7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.490995] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 567.520348] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 567.520726] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.520990] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 567.521314] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.521509] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 567.521686] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 567.522021] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 567.522279] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 567.522501] 
env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 567.522711] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 567.522913] env[68674]: DEBUG nova.virt.hardware [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 567.523834] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fe01b5-73c0-4233-829b-d34ed2d178d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.534165] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54907e0a-851a-45f9-9ac9-f1277cd7441a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.743668] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 567.812915] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239356, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.831129] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239354, 'name': CreateVM_Task, 'duration_secs': 0.778803} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.831365] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 567.832076] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.832279] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 567.832633] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 567.832915] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53fbf835-53a2-4c5c-b7d1-4737f11cab3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.838749] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 567.838749] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cc4a4d-e339-cfd9-c04b-9787465b2612" [ 567.838749] env[68674]: _type = "Task" [ 567.838749] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.848844] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cc4a4d-e339-cfd9-c04b-9787465b2612, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.884196] env[68674]: INFO nova.compute.manager [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Took 18.86 seconds to build instance. [ 567.915720] env[68674]: DEBUG oslo_vmware.api [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Task: {'id': task-3239355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.65941} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.916106] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 567.916345] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 567.916530] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 567.916723] env[68674]: INFO nova.compute.manager [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Took 2.19 seconds to destroy the instance on the hypervisor. [ 567.916998] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 567.917344] env[68674]: DEBUG nova.compute.manager [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 567.917441] env[68674]: DEBUG nova.network.neutron [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.922834] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 567.923775] env[68674]: DEBUG nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 567.929037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.403s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 567.929150] env[68674]: INFO nova.compute.claims [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.068548] env[68674]: DEBUG nova.network.neutron [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.313761] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239356, 'name': ReconfigVM_Task, 'duration_secs': 0.864208} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.314081] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Reconfigured VM instance instance-00000005 to attach disk [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 568.314729] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df4c641f-659e-499a-baeb-86d33114b435 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.325196] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 568.325196] env[68674]: value = "task-3239357" [ 568.325196] env[68674]: _type = "Task" [ 568.325196] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.334990] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239357, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.353151] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cc4a4d-e339-cfd9-c04b-9787465b2612, 'name': SearchDatastore_Task, 'duration_secs': 0.028036} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.353151] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 568.353151] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.353151] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.353634] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 568.353902] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 568.354325] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2edd41c-0503-41cb-844a-588c6e980d5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.366597] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 568.366597] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 568.366597] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbfb5398-479c-49c5-81d3-8917fa4041cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.373214] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 568.373214] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52219917-5b1e-865a-9f59-b97b0dcfcb6b" [ 568.373214] env[68674]: _type = "Task" [ 568.373214] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.383794] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52219917-5b1e-865a-9f59-b97b0dcfcb6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.389515] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88794b93-2f4f-4daa-a2a6-26feb0626c12 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.371s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 568.437027] env[68674]: DEBUG nova.compute.utils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 568.443516] env[68674]: DEBUG nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 568.572746] env[68674]: DEBUG nova.network.neutron [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.836419] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239357, 'name': Rename_Task, 'duration_secs': 0.230117} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.836716] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 568.838439] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50c597a6-9c4b-4949-8ed1-2d99dfab939b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.844674] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 568.844674] env[68674]: value = "task-3239358" [ 568.844674] env[68674]: _type = "Task" [ 568.844674] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.855149] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239358, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.888027] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52219917-5b1e-865a-9f59-b97b0dcfcb6b, 'name': SearchDatastore_Task, 'duration_secs': 0.012683} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.888921] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0a5748c-ac79-4d01-a126-fd29d234362f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.898735] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 568.898735] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f239d0-9fc6-174f-0a1c-18deb0b95217" [ 568.898735] env[68674]: _type = "Task" [ 568.898735] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.908022] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f239d0-9fc6-174f-0a1c-18deb0b95217, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.941926] env[68674]: DEBUG nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 569.079606] env[68674]: INFO nova.compute.manager [-] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Took 1.16 seconds to deallocate network for instance. [ 569.189141] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ae360b-7989-4ab5-abc4-e475328471cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.199840] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21361544-3f4a-4ecc-a98b-75bdbf265cbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.238894] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40178f52-0074-48a0-87f7-10da1aee6dea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.248230] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07968919-1852-4b66-a059-ca3e7315af27 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.264673] env[68674]: DEBUG nova.compute.provider_tree [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.358063] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239358, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.386486] env[68674]: DEBUG nova.network.neutron [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Successfully updated port: 81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 569.416593] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f239d0-9fc6-174f-0a1c-18deb0b95217, 'name': SearchDatastore_Task, 'duration_secs': 0.040933} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.416593] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 569.416593] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 02d4aee3-7267-4658-a277-8a9a00dd9f6e/02d4aee3-7267-4658-a277-8a9a00dd9f6e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 569.417267] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-439e1507-c2f6-488d-8727-cf13fe98b84a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.426752] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 569.426752] env[68674]: value = "task-3239359" [ 569.426752] env[68674]: _type = "Task" [ 569.426752] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.438714] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239359, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.523367] env[68674]: DEBUG nova.compute.manager [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Received event network-vif-plugged-38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 569.523672] env[68674]: DEBUG oslo_concurrency.lockutils [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] Acquiring lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 569.524395] env[68674]: DEBUG oslo_concurrency.lockutils [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 569.524395] env[68674]: DEBUG oslo_concurrency.lockutils [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 569.524936] env[68674]: DEBUG nova.compute.manager [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] No waiting events found dispatching network-vif-plugged-38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 569.524936] env[68674]: WARNING nova.compute.manager [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Received unexpected event network-vif-plugged-38369d37-449e-4f62-940b-9700d870d8c6 for instance with vm_state building and task_state spawning. [ 569.524936] env[68674]: DEBUG nova.compute.manager [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Received event network-changed-38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 569.525172] env[68674]: DEBUG nova.compute.manager [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Refreshing instance network info cache due to event network-changed-38369d37-449e-4f62-940b-9700d870d8c6. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 569.525412] env[68674]: DEBUG oslo_concurrency.lockutils [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] Acquiring lock "refresh_cache-02d4aee3-7267-4658-a277-8a9a00dd9f6e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.525605] env[68674]: DEBUG oslo_concurrency.lockutils [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] Acquired lock "refresh_cache-02d4aee3-7267-4658-a277-8a9a00dd9f6e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 569.526220] env[68674]: DEBUG nova.network.neutron [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Refreshing network info cache for port 38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 569.587175] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 569.768520] env[68674]: DEBUG nova.scheduler.client.report [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 569.858369] env[68674]: DEBUG oslo_vmware.api [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239358, 'name': PowerOnVM_Task, 'duration_secs': 0.742121} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.859278] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 569.859278] env[68674]: INFO nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Took 10.54 seconds to spawn the instance on the hypervisor. 
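Editor's note: the inventory payload logged just above for provider ade3f042-7427-494b-9654-0b65e074850c is what the resource tracker reports to Placement before each "Claim successful" entry. As a rough, hedged illustration only (not Nova or Placement source code), the sketch below replays the usual capacity arithmetic, (total - reserved) * allocation_ratio per resource class, on exactly the values shown in this log.

```python
# Illustrative sketch only -- not Nova/Placement code. It recomputes the
# schedulable capacity implied by the inventory logged above for provider
# ade3f042-7427-494b-9654-0b65e074850c, using the common Placement rule
# (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 120,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Schedulable amount per resource class under the rule above."""
    return {rc: int((s['total'] - s['reserved']) * s['allocation_ratio'])
            for rc, s in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```

Note that max_unit (16 VCPUs, 65530 MB, 120 GB in this log) still caps what any single allocation may request, independent of the headroom computed above.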
[ 569.859278] env[68674]: DEBUG nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 569.860386] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f644f78-91fd-4cbd-b815-5d6eac86516c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.890562] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.890715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 569.890879] env[68674]: DEBUG nova.network.neutron [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 569.941654] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239359, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.953665] env[68674]: DEBUG nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 569.986711] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 569.987603] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.987961] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 569.988308] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.988702] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 569.989148] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 569.989596] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 569.989887] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 569.990199] 
env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 569.990586] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 569.993030] env[68674]: DEBUG nova.virt.hardware [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 569.993030] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd84075-5d3e-4b14-bb2b-b916d5c0c91f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.004781] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed639d8-f0d9-4afe-8559-92a20a1b406c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.022776] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.029645] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Creating folder: Project (89e67d52039141c9b45625b855c19b34). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.032366] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fed3858-72d4-45d9-8a52-a76df0ea03cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.045593] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Created folder: Project (89e67d52039141c9b45625b855c19b34) in parent group-v647377. [ 570.047026] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Creating folder: Instances. Parent ref: group-v647395. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 570.047026] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b28e524-9b67-4c99-93a8-6b1a33ebb1b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.060192] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Created folder: Instances in parent group-v647395. [ 570.060192] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 570.060192] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 570.060192] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0ad6dc7-bff0-4c0e-90cf-11d76cbf2887 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.081501] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 570.081501] env[68674]: value = "task-3239362" [ 570.081501] env[68674]: _type = "Task" [ 570.081501] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.092790] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239362, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.275494] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 570.276366] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 570.283275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.689s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 570.283275] env[68674]: INFO nova.compute.claims [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.384729] env[68674]: INFO nova.compute.manager [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Took 16.18 seconds to build instance. [ 570.442661] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668743} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.442661] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 02d4aee3-7267-4658-a277-8a9a00dd9f6e/02d4aee3-7267-4658-a277-8a9a00dd9f6e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 570.442661] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 570.443076] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fe4cb12-a74c-4803-91a8-47231ac20637 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.451782] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 570.451782] env[68674]: value = "task-3239363" [ 570.451782] env[68674]: _type = "Task" [ 570.451782] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.469245] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239363, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.545646] env[68674]: DEBUG nova.network.neutron [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.597442] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239362, 'name': CreateVM_Task, 'duration_secs': 0.477491} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.597643] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 570.598460] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.598460] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 570.598626] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 570.598837] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-161e49c1-08b0-4dda-9314-0b62edfe8085 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.609200] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 570.609200] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52655201-cca1-ded5-f73f-1c3b857dfa9f" [ 570.609200] env[68674]: _type = "Task" [ 570.609200] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.623793] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52655201-cca1-ded5-f73f-1c3b857dfa9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.787673] env[68674]: DEBUG nova.compute.utils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 570.793289] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 570.793467] env[68674]: DEBUG nova.network.neutron [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 570.886615] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0be9c393-15d1-4d4b-8e41-9e6c9324e02c tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.686s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 570.964774] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076974} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.966046] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 570.967935] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833062e4-21e7-4f4d-9298-ca7901621c21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.997569] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 02d4aee3-7267-4658-a277-8a9a00dd9f6e/02d4aee3-7267-4658-a277-8a9a00dd9f6e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 570.997569] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2e8d272-bb03-4d07-91c8-56cea866a069 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.021987] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 571.021987] env[68674]: value = "task-3239364" [ 571.021987] env[68674]: _type = "Task" [ 571.021987] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.034867] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239364, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.038341] env[68674]: DEBUG nova.policy [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '666d0d2bb76a41948cdb6df7b700429d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be6579f30b2c418c98a5a373176baf05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 571.133067] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52655201-cca1-ded5-f73f-1c3b857dfa9f, 'name': SearchDatastore_Task, 'duration_secs': 0.044996} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.134070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 571.134942] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 571.134942] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.135104] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 571.135217] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 571.135494] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b99566d7-5f50-4d4f-aebf-68c5c22bcdc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.147322] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 571.147555] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 571.148397] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a9982e4-6f96-457d-b411-46979638d9ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.155861] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 571.155861] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e32dec-faae-ccd9-d2b7-879e417125f0" [ 571.155861] env[68674]: _type = "Task" [ 571.155861] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.168288] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e32dec-faae-ccd9-d2b7-879e417125f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.217077] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "d1c7a508-7d45-4eff-bb06-b85bfe392772" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 571.217327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 571.262278] env[68674]: DEBUG nova.network.neutron [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Updating instance_info_cache with network_info: [{"id": "81afa256-db6b-44e2-944a-7654579b8b50", "address": "fa:16:3e:11:0a:6e", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81afa256-db", "ovs_interfaceid": 
"81afa256-db6b-44e2-944a-7654579b8b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.295524] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 571.314011] env[68674]: INFO nova.compute.manager [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Rebuilding instance [ 571.335691] env[68674]: DEBUG nova.network.neutron [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Updated VIF entry in instance network info cache for port 38369d37-449e-4f62-940b-9700d870d8c6. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 571.336057] env[68674]: DEBUG nova.network.neutron [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Updating instance_info_cache with network_info: [{"id": "38369d37-449e-4f62-940b-9700d870d8c6", "address": "fa:16:3e:36:cb:68", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38369d37-44", "ovs_interfaceid": "38369d37-449e-4f62-940b-9700d870d8c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.378126] env[68674]: DEBUG nova.compute.manager [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 571.379472] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f6fa29-c3ca-4bab-9cbe-99f00a617023 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.539308] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 
tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239364, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.553781] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7466fd-31f4-4b63-8f9f-6342844b7415 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.562719] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c19d52-3193-4afa-9035-8530cd39ef63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.598741] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68022409-c53c-4acf-8d16-75a52ba9c127 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.609739] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fef52f1-e506-4e94-981e-6a682bdb96e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.628197] env[68674]: DEBUG nova.compute.provider_tree [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.667756] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e32dec-faae-ccd9-d2b7-879e417125f0, 'name': SearchDatastore_Task, 'duration_secs': 0.019277} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.668590] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39bf4534-74d7-41cd-a59d-41c1d227e04a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.675329] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 571.675329] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5263e985-a5c2-cb59-4166-de8570116b79" [ 571.675329] env[68674]: _type = "Task" [ 571.675329] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.685658] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5263e985-a5c2-cb59-4166-de8570116b79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.722470] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 571.769128] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 571.769641] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Instance network_info: |[{"id": "81afa256-db6b-44e2-944a-7654579b8b50", "address": "fa:16:3e:11:0a:6e", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81afa256-db", "ovs_interfaceid": "81afa256-db6b-44e2-944a-7654579b8b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 571.773421] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:0a:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81afa256-db6b-44e2-944a-7654579b8b50', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.790319] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Creating folder: Project (be6579f30b2c418c98a5a373176baf05). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.791227] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-588022e2-0eeb-4bca-b92e-4e19ccbdd729 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.816818] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Created folder: Project (be6579f30b2c418c98a5a373176baf05) in parent group-v647377. [ 571.816818] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Creating folder: Instances. Parent ref: group-v647398. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.820924] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1e7a7af-338d-4b68-af35-f52c70ad5352 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.837804] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Created folder: Instances in parent group-v647398. [ 571.838169] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 571.838443] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 571.838654] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc3c596b-e173-446b-b0dc-78b4a08a735a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.861033] env[68674]: DEBUG oslo_concurrency.lockutils [req-5e2b913f-8242-4448-bb96-a86b30e1bc22 req-1a5e7ac1-2ba6-442a-99b8-54f7c465696d service nova] Releasing lock "refresh_cache-02d4aee3-7267-4658-a277-8a9a00dd9f6e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 571.870287] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.870287] env[68674]: value = "task-3239367" [ 571.870287] env[68674]: _type = "Task" [ 571.870287] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.883548] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239367, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.033097] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239364, 'name': ReconfigVM_Task, 'duration_secs': 0.786246} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.033557] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 02d4aee3-7267-4658-a277-8a9a00dd9f6e/02d4aee3-7267-4658-a277-8a9a00dd9f6e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 572.034361] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d789fe8c-ac39-41c9-be08-8cb00c0e62c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.043976] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 572.043976] env[68674]: value = "task-3239368" [ 572.043976] env[68674]: _type = "Task" [ 572.043976] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.058451] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239368, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.119739] env[68674]: DEBUG nova.network.neutron [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Successfully updated port: 7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.133648] env[68674]: DEBUG nova.scheduler.client.report [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 572.195333] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5263e985-a5c2-cb59-4166-de8570116b79, 'name': SearchDatastore_Task, 'duration_secs': 0.017212} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.195626] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 572.195889] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] a123c5f2-e775-4dd2-9a5a-35e7d6705dfa/a123c5f2-e775-4dd2-9a5a-35e7d6705dfa.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 572.196169] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-086d2c7a-92bc-4895-9043-2a92ea42ff5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.206651] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 572.206651] env[68674]: value = "task-3239369" [ 572.206651] env[68674]: _type = "Task" [ 572.206651] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.218281] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.262443] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.318794] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 572.363971] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 572.364419] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.364578] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 572.364860] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.365116] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 572.366374] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 572.366374] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 572.366374] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 572.366374] 
env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 572.366601] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 572.366810] env[68674]: DEBUG nova.virt.hardware [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 572.367872] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8604a7-726d-4e34-8b5f-85cce1bcd793 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.389473] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198db09d-4df4-4126-9783-04ec6209917d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.393259] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239367, 'name': CreateVM_Task, 'duration_secs': 0.47411} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.393259] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 572.393259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.396215] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.396612] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 572.411390] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powering off the VM {{(pid=68674) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 572.411390] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-145d821e-a489-4d81-a7d1-6a921826aaa9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.412272] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89e7bfdd-ca37-44ee-adae-9d36c3f82227 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.419176] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 572.419176] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f726a7-1757-0fed-31b1-c2f0e57722e5" [ 572.419176] env[68674]: _type = "Task" [ 572.419176] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.424956] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 572.424956] env[68674]: value = "task-3239370" [ 572.424956] env[68674]: _type = "Task" [ 572.424956] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.442780] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f726a7-1757-0fed-31b1-c2f0e57722e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.451058] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.557886] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239368, 'name': Rename_Task, 'duration_secs': 0.201345} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.562201] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 572.562616] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-596fac0e-adf9-470f-a838-d2b8603ef6ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.575739] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 572.575739] env[68674]: value = "task-3239371" [ 572.575739] env[68674]: _type = "Task" [ 572.575739] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.586745] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239371, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.622948] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "refresh_cache-160d9aa2-048d-45a2-ab55-581c8721ac3b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.623118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "refresh_cache-160d9aa2-048d-45a2-ab55-581c8721ac3b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.623788] env[68674]: DEBUG nova.network.neutron [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 572.642662] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 572.642662] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 572.649791] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.110s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.726010] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239369, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.897133] env[68674]: DEBUG nova.network.neutron [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Successfully created port: 40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.939070] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239370, 'name': PowerOffVM_Task, 'duration_secs': 0.170652} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.941205] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 572.941477] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.943169] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f726a7-1757-0fed-31b1-c2f0e57722e5, 'name': SearchDatastore_Task, 'duration_secs': 0.023354} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.943169] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d816d6-51a8-4574-a23b-8ad2d6e26543 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.950358] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 572.950646] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.950950] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.951166] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.952019] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.952019] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-608d239c-22ce-4e54-bc65-4e95049b80ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.962130] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 572.962130] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d165e11b-071b-403d-bdb1-e162f2f2396b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.963761] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.963954] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 572.970134] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-574dad26-9200-4d4a-85fb-a69ffc93eb59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.979055] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 572.979055] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527d7cb9-0f61-0ae0-ddbf-61af1e886880" [ 572.979055] env[68674]: _type = "Task" [ 572.979055] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.991704] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527d7cb9-0f61-0ae0-ddbf-61af1e886880, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.997945] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 572.997945] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 572.999149] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleting the datastore file [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 572.999149] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ea5d3e5-9e40-40f7-88d1-4d2c7c475be7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.006248] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 573.006248] env[68674]: value = "task-3239373" [ 573.006248] env[68674]: _type = "Task" [ 573.006248] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.015516] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.075237] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.075237] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.095565] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239371, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.149708] env[68674]: DEBUG nova.compute.utils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 573.151206] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 573.151377] env[68674]: DEBUG nova.network.neutron [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 573.222248] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239369, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547477} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.222686] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] a123c5f2-e775-4dd2-9a5a-35e7d6705dfa/a123c5f2-e775-4dd2-9a5a-35e7d6705dfa.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 573.223022] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 573.223366] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-434a8565-0697-4303-8538-c1fc362d5d98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.235340] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 573.235340] env[68674]: value = "task-3239374" [ 573.235340] env[68674]: _type = "Task" [ 573.235340] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.249790] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239374, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.291418] env[68674]: DEBUG nova.network.neutron [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.387150] env[68674]: DEBUG nova.policy [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5773838e5804ee3aff12b550b804eb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cda9cc02a4542dca3a0f16209eb4101', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 573.494020] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527d7cb9-0f61-0ae0-ddbf-61af1e886880, 'name': SearchDatastore_Task, 'duration_secs': 0.012183} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.494479] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f9d5ef7-8f24-4fa3-8a2b-eb36ad330263 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.505346] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 573.505346] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526de53e-94da-184c-f51b-f6c9015f36e7" [ 573.505346] env[68674]: _type = "Task" [ 573.505346] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.517534] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526de53e-94da-184c-f51b-f6c9015f36e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.523593] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299166} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.523854] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 573.524081] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 573.524282] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 573.580054] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 573.591987] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239371, 'name': PowerOnVM_Task} progress is 81%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.661936] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 573.691583] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e84db5bd-b6ec-42ef-9c34-a4160c44d973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 573.692782] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 02d4aee3-7267-4658-a277-8a9a00dd9f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 573.692782] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 573.748639] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127359} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.748639] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 573.749024] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb45535-433c-4c6e-8697-ae200c14fe1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.772254] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] a123c5f2-e775-4dd2-9a5a-35e7d6705dfa/a123c5f2-e775-4dd2-9a5a-35e7d6705dfa.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 573.773495] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01b3faff-44a9-4e1c-9294-b1ca129bc141 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.795731] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 573.795731] env[68674]: value = "task-3239375" [ 573.795731] env[68674]: _type = "Task" [ 573.795731] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.806349] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239375, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.894497] env[68674]: DEBUG nova.network.neutron [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Updating instance_info_cache with network_info: [{"id": "7c0ea59c-e774-45af-b163-a886f32640b1", "address": "fa:16:3e:3a:13:ce", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c0ea59c-e7", "ovs_interfaceid": "7c0ea59c-e774-45af-b163-a886f32640b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.013985] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526de53e-94da-184c-f51b-f6c9015f36e7, 'name': SearchDatastore_Task, 'duration_secs': 0.049693} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.017032] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 574.017032] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e75d2bc7-f356-4443-9641-d9ebf35843cd/e75d2bc7-f356-4443-9641-d9ebf35843cd.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 574.017032] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4121fd33-1df9-4e28-914f-e25b3fcae51a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.027624] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 574.027624] env[68674]: value = "task-3239376" [ 574.027624] env[68674]: _type = "Task" [ 574.027624] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.041377] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.091890] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239371, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.110915] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 574.197800] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 574.197800] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance a123c5f2-e775-4dd2-9a5a-35e7d6705dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.197800] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 574.197800] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 503e9328-bbd8-414f-8bea-250ed8247d67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.198081] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 160d9aa2-048d-45a2-ab55-581c8721ac3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.198081] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.198081] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e75d2bc7-f356-4443-9641-d9ebf35843cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.198081] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 58830b0e-dbf3-424d-8b9a-bb298b6bea21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.308291] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239375, 'name': ReconfigVM_Task, 'duration_secs': 0.356384} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.308900] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Reconfigured VM instance instance-00000009 to attach disk [datastore1] a123c5f2-e775-4dd2-9a5a-35e7d6705dfa/a123c5f2-e775-4dd2-9a5a-35e7d6705dfa.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 574.309843] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5e90d0e-4eed-4fb4-a3bb-fb35b94fc017 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.321050] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 574.321050] env[68674]: value = "task-3239377" [ 574.321050] env[68674]: _type = "Task" [ 574.321050] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.331072] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239377, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.397512] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "refresh_cache-160d9aa2-048d-45a2-ab55-581c8721ac3b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 574.397850] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Instance network_info: |[{"id": "7c0ea59c-e774-45af-b163-a886f32640b1", "address": "fa:16:3e:3a:13:ce", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c0ea59c-e7", "ovs_interfaceid": "7c0ea59c-e774-45af-b163-a886f32640b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 574.398325] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:13:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c0ea59c-e774-45af-b163-a886f32640b1', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.410493] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 574.410493] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 574.410493] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2323a4a5-8f49-46d2-8896-a674402ddf54 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.432459] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.432459] env[68674]: value = "task-3239378" [ 574.432459] env[68674]: _type = "Task" [ 574.432459] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.441671] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239378, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.543726] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239376, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.576627] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 574.576929] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.576996] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 574.582388] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.582388] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 574.582388] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 574.582388] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 574.582388] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 574.582599] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa 
tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 574.582599] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 574.582599] env[68674]: DEBUG nova.virt.hardware [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 574.583379] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ae4eaf-4e77-4279-9a08-702231ebb9ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.605231] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adf340c-811c-42ee-9ea0-4eb2d9e7f8ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.609595] env[68674]: DEBUG oslo_vmware.api [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239371, 'name': PowerOnVM_Task, 'duration_secs': 1.524927} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.609896] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 574.610647] env[68674]: INFO nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Took 13.00 seconds to spawn the instance on the hypervisor. 
[ 574.610647] env[68674]: DEBUG nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 574.611530] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815913ea-296c-45b3-ac42-d3e6febc15d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.624326] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.635242] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 574.635986] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 574.636276] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2c4e174-4ad4-401a-8867-b95fef3f77d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.666358] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.666358] env[68674]: value = "task-3239379" [ 574.666358] env[68674]: _type = "Task" [ 574.666358] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.679953] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 574.684738] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239379, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.705344] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 574.705344] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 89ccc16e-d0e5-4f7d-985c-8693188e7ed5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 574.720872] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 574.720872] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.720872] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 574.721161] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.721161] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 574.721161] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 574.721161] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 574.721161] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 574.721317] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 574.721317] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 574.721317] env[68674]: DEBUG nova.virt.hardware [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 574.722155] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77a0d49-2f96-4f97-a138-f59b7a0f8ce1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.732711] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718485d8-f171-4fc5-8a6b-92ee98630794 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.830278] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239377, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.947499] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239378, 'name': CreateVM_Task, 'duration_secs': 0.389068} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.947499] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 574.948188] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.948393] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 574.948716] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 574.950558] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66c9f6de-2645-4a75-8ec5-d00480d6b246 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.958386] env[68674]: DEBUG nova.network.neutron [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Successfully created port: b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.966672] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 574.966672] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523fccf1-7436-438f-7884-09409a58a827" [ 574.966672] env[68674]: _type = "Task" [ 574.966672] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.980221] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523fccf1-7436-438f-7884-09409a58a827, 'name': SearchDatastore_Task, 'duration_secs': 0.011332} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.980536] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 574.980835] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.981100] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.981346] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 574.981429] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.981952] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f0e029e-8efb-49bb-b577-52e3f71af61a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.993514] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.993514] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 574.993724] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-761c1c48-162a-4f15-9f67-451b15310634 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.005568] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 575.005568] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529b8b82-3e00-ecaa-815c-966ba08116b4" [ 575.005568] env[68674]: _type = "Task" [ 575.005568] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.017658] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529b8b82-3e00-ecaa-815c-966ba08116b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.044079] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.163433] env[68674]: INFO nova.compute.manager [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Took 19.01 seconds to build instance. [ 575.178338] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239379, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.212791] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance d1c7a508-7d45-4eff-bb06-b85bfe392772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 575.212791] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 575.212791] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 575.253437] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "357b515d-ef37-4688-969e-f894be30edb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.254669] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "357b515d-ef37-4688-969e-f894be30edb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.336752] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239377, 'name': Rename_Task, 'duration_secs': 0.906215} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.337064] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 575.340252] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc619824-28c7-49eb-9e65-8b63cc036430 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.352244] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 575.352244] env[68674]: value = "task-3239380" [ 575.352244] env[68674]: _type = "Task" [ 575.352244] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.364735] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239380, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.522114] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529b8b82-3e00-ecaa-815c-966ba08116b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011154} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.524512] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7146939b-b358-499f-82f5-474d72c7b916 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.532020] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 575.532020] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bbc14e-b033-d887-325f-25e97a310514" [ 575.532020] env[68674]: _type = "Task" [ 575.532020] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.547474] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0ab635-7580-4479-ba22-ad071b85b7c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.558516] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239376, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.558516] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bbc14e-b033-d887-325f-25e97a310514, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.568543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ee9735-2b75-4000-b984-9742e2dd912e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.615610] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596db33b-9df8-4b21-a97f-a18eaae455a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.622219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a9792c-a8e2-47de-9a68-862dc94e39ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.636819] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.672762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf5cb607-df16-47ba-80a4-54e773ae5689 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.530s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 575.679133] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239379, 'name': CreateVM_Task, 'duration_secs': 0.984229} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.679311] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 575.680259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.680259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 575.680259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 575.681376] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3feaad4f-41dd-44cf-b7a2-38328d385b1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.685894] env[68674]: DEBUG nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Received event network-vif-plugged-81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 575.686110] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Acquiring lock "e75d2bc7-f356-4443-9641-d9ebf35843cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.686459] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.686459] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 575.686607] env[68674]: DEBUG nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: 
e75d2bc7-f356-4443-9641-d9ebf35843cd] No waiting events found dispatching network-vif-plugged-81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 575.686863] env[68674]: WARNING nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Received unexpected event network-vif-plugged-81afa256-db6b-44e2-944a-7654579b8b50 for instance with vm_state building and task_state spawning. [ 575.686955] env[68674]: DEBUG nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Received event network-changed-81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 575.687075] env[68674]: DEBUG nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Refreshing instance network info cache due to event network-changed-81afa256-db6b-44e2-944a-7654579b8b50. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 575.687308] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Acquiring lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.687389] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Acquired lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 575.687527] env[68674]: DEBUG nova.network.neutron [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Refreshing network info cache for port 81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 575.693964] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 575.693964] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527694cb-8241-bd9e-9820-ae39e46cd53e" [ 575.693964] env[68674]: _type = "Task" [ 575.693964] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.708995] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527694cb-8241-bd9e-9820-ae39e46cd53e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.760910] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 575.864156] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239380, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.029413] env[68674]: DEBUG nova.network.neutron [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Successfully updated port: 40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 576.047929] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239376, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.547122} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.047929] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e75d2bc7-f356-4443-9641-d9ebf35843cd/e75d2bc7-f356-4443-9641-d9ebf35843cd.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 576.047929] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 576.047929] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-123e339a-079e-4169-86db-dff6b53f97a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.057976] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bbc14e-b033-d887-325f-25e97a310514, 'name': SearchDatastore_Task, 'duration_secs': 0.025312} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.059204] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 576.060027] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 160d9aa2-048d-45a2-ab55-581c8721ac3b/160d9aa2-048d-45a2-ab55-581c8721ac3b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 576.061028] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ab8e122-c249-4f9a-bf50-d3d6541581a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.067347] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 576.067347] env[68674]: value = "task-3239381" [ 576.067347] env[68674]: _type = "Task" [ 576.067347] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.073646] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 576.073646] env[68674]: value = "task-3239382" [ 576.073646] env[68674]: _type = "Task" [ 576.073646] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.083030] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239381, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.087264] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.146024] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 576.208379] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527694cb-8241-bd9e-9820-ae39e46cd53e, 'name': SearchDatastore_Task, 'duration_secs': 0.021479} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.209511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 576.209511] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 576.209511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.209511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.210179] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.210477] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5a3e005-90c4-428b-aa98-96ae8f220a64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.242585] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.242772] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 576.244528] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddb9f238-526d-400c-b114-0cb7b8107ea9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.249936] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 576.249936] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ea219-5f39-dac6-3565-3cbb3ad1fdb5" [ 576.249936] env[68674]: _type = "Task" [ 576.249936] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.261271] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ea219-5f39-dac6-3565-3cbb3ad1fdb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.290546] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.344179] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.344179] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.344553] env[68674]: DEBUG nova.compute.manager [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 576.347754] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7da205b-04fb-40a8-9d77-3218dcbb7e99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.361018] env[68674]: DEBUG nova.compute.manager [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 576.361018] env[68674]: DEBUG nova.objects.instance [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lazy-loading 'flavor' on Instance uuid e84db5bd-b6ec-42ef-9c34-a4160c44d973 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 576.369921] env[68674]: DEBUG oslo_vmware.api [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239380, 'name': PowerOnVM_Task, 'duration_secs': 0.579376} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.370338] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 576.370803] env[68674]: INFO nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Took 6.42 seconds to spawn the instance on the hypervisor. 
[ 576.370803] env[68674]: DEBUG nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 576.374156] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7873fd5-313e-4c04-ab96-e29ae0e3a88d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.533812] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "refresh_cache-7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.534027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "refresh_cache-7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.534225] env[68674]: DEBUG nova.network.neutron [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.595031] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070283} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.595792] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 576.597138] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab08b9d-ec45-4dda-96b1-f09a43911e32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.604139] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239382, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.636799] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] e75d2bc7-f356-4443-9641-d9ebf35843cd/e75d2bc7-f356-4443-9641-d9ebf35843cd.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 576.642237] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2427d320-bfc3-479e-bec6-6be2579bdf42 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.662356] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 576.662621] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.015s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.663295] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "50bb7509-b7e9-4dc3-9746-acd46010cc26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.663503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.665759] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.922s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.667501] env[68674]: INFO nova.compute.claims [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.680456] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 576.680456] env[68674]: value = 
"task-3239383" [ 576.680456] env[68674]: _type = "Task" [ 576.680456] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.697244] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239383, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.767346] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ea219-5f39-dac6-3565-3cbb3ad1fdb5, 'name': SearchDatastore_Task, 'duration_secs': 0.015517} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.768287] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daa94bd4-0038-4811-a907-82aeed420b45 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.776106] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 576.776106] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b3fa1-2d34-4143-c62b-2d9985f92017" [ 576.776106] env[68674]: _type = "Task" [ 576.776106] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.788251] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b3fa1-2d34-4143-c62b-2d9985f92017, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.896039] env[68674]: INFO nova.compute.manager [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Took 15.63 seconds to build instance. [ 576.970718] env[68674]: DEBUG nova.network.neutron [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Updated VIF entry in instance network info cache for port 81afa256-db6b-44e2-944a-7654579b8b50. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 576.970944] env[68674]: DEBUG nova.network.neutron [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Updating instance_info_cache with network_info: [{"id": "81afa256-db6b-44e2-944a-7654579b8b50", "address": "fa:16:3e:11:0a:6e", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81afa256-db", "ovs_interfaceid": "81afa256-db6b-44e2-944a-7654579b8b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.074028] env[68674]: DEBUG nova.compute.manager [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Received event network-changed-27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 577.074028] env[68674]: DEBUG nova.compute.manager [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Refreshing instance network info cache due to event network-changed-27bb7642-85fc-46b6-9ac7-8a3a6db3271a. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 577.074028] env[68674]: DEBUG oslo_concurrency.lockutils [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] Acquiring lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.074028] env[68674]: DEBUG oslo_concurrency.lockutils [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] Acquired lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 577.074028] env[68674]: DEBUG nova.network.neutron [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Refreshing network info cache for port 27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 577.093372] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239382, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.989051} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.094830] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 160d9aa2-048d-45a2-ab55-581c8721ac3b/160d9aa2-048d-45a2-ab55-581c8721ac3b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 577.094830] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 577.094830] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c77c9d44-36cd-4498-acb5-f7c5e3945daf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.103295] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 577.103295] env[68674]: value = "task-3239384" [ 577.103295] env[68674]: _type = "Task" [ 577.103295] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.114997] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239384, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.128712] env[68674]: DEBUG nova.network.neutron [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.166821] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 577.196070] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239383, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.290836] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b3fa1-2d34-4143-c62b-2d9985f92017, 'name': SearchDatastore_Task, 'duration_secs': 0.064887} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.291219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 577.291614] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 577.292475] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe813405-9b1f-4c12-b754-8e2660c29a10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.302201] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 577.302201] env[68674]: value = "task-3239385" [ 577.302201] env[68674]: _type = "Task" [ 577.302201] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.328146] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239385, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.369423] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 577.369778] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2e140e7-f7cb-457b-b3cc-21b7839fbbc8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.380358] env[68674]: DEBUG oslo_vmware.api [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 577.380358] env[68674]: value = "task-3239386" [ 577.380358] env[68674]: _type = "Task" [ 577.380358] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.390341] env[68674]: DEBUG oslo_vmware.api [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.398633] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e8b17a47-582e-4236-a7d9-f1af03191d0a tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.156s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 577.420902] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.420902] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 577.477327] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Releasing lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" 
{{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 577.477671] env[68674]: DEBUG nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Received event network-vif-plugged-7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 577.477913] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Acquiring lock "160d9aa2-048d-45a2-ab55-581c8721ac3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.478172] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 577.478400] env[68674]: DEBUG oslo_concurrency.lockutils [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 577.478575] env[68674]: DEBUG nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] No waiting events found dispatching network-vif-plugged-7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 577.478744] env[68674]: WARNING nova.compute.manager [req-aea36b3b-b133-404a-9675-2dd07d8fa9ab req-fd54c5b0-2206-49cb-b0d7-b16ad1ea4842 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Received unexpected event network-vif-plugged-7c0ea59c-e774-45af-b163-a886f32640b1 for instance with vm_state building and task_state spawning. [ 577.619822] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239384, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078879} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.620360] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 577.621281] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be1f89a-7deb-4c05-8fa8-9be17020dde8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.654938] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 160d9aa2-048d-45a2-ab55-581c8721ac3b/160d9aa2-048d-45a2-ab55-581c8721ac3b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 577.659226] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c87f948-a838-4e52-8aee-fcf29e32fc6d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.692306] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 577.692306] env[68674]: value = "task-3239387" [ 577.692306] env[68674]: _type = "Task" [ 577.692306] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.702805] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239383, 'name': ReconfigVM_Task, 'duration_secs': 0.894888} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.704100] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.706660] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Reconfigured VM instance instance-00000007 to attach disk [datastore2] e75d2bc7-f356-4443-9641-d9ebf35843cd/e75d2bc7-f356-4443-9641-d9ebf35843cd.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 577.706660] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ce9885f-9013-4da7-8dd3-37fcd7b085ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.714702] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239387, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.725210] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 577.725210] env[68674]: value = "task-3239388" [ 577.725210] env[68674]: _type = "Task" [ 577.725210] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.750592] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239388, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.826058] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239385, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.897158] env[68674]: DEBUG oslo_vmware.api [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239386, 'name': PowerOffVM_Task, 'duration_secs': 0.407481} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.897519] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 577.897751] env[68674]: DEBUG nova.compute.manager [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 577.898641] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8d7c35-5400-4231-9a59-75198798a157 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.924215] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 578.019257] env[68674]: DEBUG nova.network.neutron [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Updating instance_info_cache with network_info: [{"id": "40a38082-1691-4d4b-9fce-f07687409a92", "address": "fa:16:3e:9b:97:af", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a38082-16", "ovs_interfaceid": "40a38082-1691-4d4b-9fce-f07687409a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.093450] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70cac17-8166-459d-82ca-f804ccef629f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.103211] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9edb9b4-edc0-43ba-a385-f54561ab4030 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.137438] env[68674]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2894ff09-0c91-481d-a22d-6c7b9de0ba2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.147449] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041adc46-f0df-48fc-b3c8-92826913eee9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.157360] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "8790d635-fec5-4dcf-8cb0-220c2edec971" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 578.157650] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.168415] env[68674]: DEBUG nova.compute.provider_tree [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.206733] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.248772] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239388, 'name': Rename_Task, 'duration_secs': 0.322003} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.249172] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 578.249774] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dcee5d2-17ad-4897-b7ea-756d24b58ff9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.259693] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 578.259693] env[68674]: value = "task-3239389" [ 578.259693] env[68674]: _type = "Task" [ 578.259693] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.270789] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.317379] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239385, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.422693] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99abeffc-7178-41f9-8dc8-5c1fd7bf20bc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.078s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 578.461247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 578.527526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "refresh_cache-7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.527722] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Instance network_info: |[{"id": "40a38082-1691-4d4b-9fce-f07687409a92", "address": "fa:16:3e:9b:97:af", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a38082-16", "ovs_interfaceid": "40a38082-1691-4d4b-9fce-f07687409a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 578.528255] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:97:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40a38082-1691-4d4b-9fce-f07687409a92', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
578.539418] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 578.539731] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 578.540025] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aabde16c-c650-4b0b-a26a-18ed0a23cbea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.568357] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.568357] env[68674]: value = "task-3239390" [ 578.568357] env[68674]: _type = "Task" [ 578.568357] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.580363] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239390, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.603638] env[68674]: DEBUG nova.network.neutron [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Successfully updated port: b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 578.671799] env[68674]: DEBUG nova.scheduler.client.report [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 578.703598] env[68674]: DEBUG nova.network.neutron [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updated VIF entry in instance network info cache for port 27bb7642-85fc-46b6-9ac7-8a3a6db3271a. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 578.704131] env[68674]: DEBUG nova.network.neutron [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updating instance_info_cache with network_info: [{"id": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "address": "fa:16:3e:55:ec:a9", "network": {"id": "a8c05d42-7059-4056-96f9-f929e862948a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2017205475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c18d4b8073be4aa59800f2c6f482fdad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb7642-85", "ovs_interfaceid": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.709197] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239387, 'name': ReconfigVM_Task, 'duration_secs': 0.640285} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.709708] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 160d9aa2-048d-45a2-ab55-581c8721ac3b/160d9aa2-048d-45a2-ab55-581c8721ac3b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.710572] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f850127d-5d10-4398-bd51-59def0485538 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.723024] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 578.723024] env[68674]: value = "task-3239391" [ 578.723024] env[68674]: _type = "Task" [ 578.723024] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.732341] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239391, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.771860] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239389, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.817649] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239385, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.082067] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239390, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.107952] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.108253] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquired lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 579.108486] env[68674]: DEBUG nova.network.neutron [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.176789] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.177882] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 579.180413] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.593s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.180518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.187224] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.921s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.189268] env[68674]: INFO nova.compute.claims [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.210324] env[68674]: DEBUG oslo_concurrency.lockutils [req-8c4e4137-cdea-4bf8-898b-e7dfd9578e30 req-a7e7c3a1-c170-4088-9b61-48a4bf57d3ae service nova] Releasing lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 579.225285] env[68674]: INFO nova.scheduler.client.report [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Deleted allocations for instance 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae [ 579.236211] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239391, 'name': Rename_Task, 'duration_secs': 0.436437} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.236211] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 579.236453] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edab0dc9-66ac-4b08-9516-b564789452fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.245520] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 579.245520] env[68674]: value = "task-3239392" [ 579.245520] env[68674]: _type = "Task" [ 579.245520] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.256722] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239392, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.277456] env[68674]: DEBUG oslo_vmware.api [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239389, 'name': PowerOnVM_Task, 'duration_secs': 0.905921} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.277568] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 579.277767] env[68674]: INFO nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Took 14.16 seconds to spawn the instance on the hypervisor. [ 579.278194] env[68674]: DEBUG nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 579.280173] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27212929-f77c-4acd-8bf5-dd90161b3d90 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.322946] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239385, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.834097} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.327076] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 579.327076] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 579.327076] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fb4e6f7-e7db-4803-9e89-490f572ea745 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.335778] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 579.335778] env[68674]: value = "task-3239393" [ 579.335778] env[68674]: _type = "Task" [ 579.335778] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.345686] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.581549] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239390, 'name': CreateVM_Task, 'duration_secs': 0.674625} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.581882] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 579.585115] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.585115] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 579.585115] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 579.585115] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7d8cb45-4813-4866-966b-f5b514a12ca8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.591166] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 579.591166] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277568a-f14d-91f3-3241-9d6dc827c003" [ 579.591166] env[68674]: _type = "Task" [ 579.591166] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.599436] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277568a-f14d-91f3-3241-9d6dc827c003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.679152] env[68674]: DEBUG nova.network.neutron [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.701162] env[68674]: DEBUG nova.compute.utils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 579.706327] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 579.706327] env[68674]: DEBUG nova.network.neutron [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 579.741188] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c6fb9d9e-e6a5-4065-8c3c-de788dcfa50f tempest-ServersAaction247Test-717479239 tempest-ServersAaction247Test-717479239-project-member] Lock "0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.938s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.758784] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239392, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.806625] env[68674]: INFO nova.compute.manager [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Took 23.02 seconds to build instance. [ 579.813551] env[68674]: DEBUG nova.policy [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce343abf0f14bb5b5141c50113ccf6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61ea6bfeb37d470a970e9c98e4827ade', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 579.848948] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0793} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.849399] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 579.850118] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd91e5aa-3716-42f9-81ef-f05041e0a1ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.875452] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 579.879250] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-113fe2de-1647-4856-b24e-696561856e45 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.902643] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 579.902643] env[68674]: value = "task-3239394" [ 579.902643] env[68674]: _type = "Task" [ 579.902643] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.912537] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239394, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.063414] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "f500b495-7bfb-40ff-8a10-e46ca6744902" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.063865] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.101084] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277568a-f14d-91f3-3241-9d6dc827c003, 'name': SearchDatastore_Task, 'duration_secs': 0.024122} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.101324] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 580.101551] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 580.101837] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.101986] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 580.102206] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 580.102522] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10c7e16d-b187-4d27-9e27-2d3ac64458f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.116811] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 580.116811] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 580.116811] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d306a2f6-61ab-49e3-90b7-3470e2761a24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.123691] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 580.123691] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525e1742-9ed2-ce4d-2ef2-648bf9fc369c" [ 580.123691] env[68674]: _type = "Task" [ 580.123691] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.138656] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525e1742-9ed2-ce4d-2ef2-648bf9fc369c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.139914] env[68674]: DEBUG nova.network.neutron [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updating instance_info_cache with network_info: [{"id": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "address": "fa:16:3e:38:28:4f", "network": {"id": "ec06235b-8062-47c5-b224-7a61c2daddb4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-844083245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cda9cc02a4542dca3a0f16209eb4101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f6f4d8-6e", "ovs_interfaceid": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.204095] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 580.262676] env[68674]: DEBUG oslo_vmware.api [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239392, 'name': PowerOnVM_Task, 'duration_secs': 0.833298} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.263014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 580.263241] env[68674]: INFO nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Took 12.77 seconds to spawn the instance on the hypervisor. 
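The recurring lock entries in this trace ('Acquiring lock', 'Lock "compute_resources" acquired by ... :: waited N.NNNs', '"released" by ... :: held N.NNNs') are emitted by oslo.concurrency's lockutils helpers rather than by Nova itself. A minimal sketch of the two usage forms that appear to produce them follows; the lock names echo the log, while the decorated function and the critical-section bodies are placeholders, not code from this deployment.

from oslo_concurrency import lockutils

# Decorator form: the generated wrapper ("inner" in the lockutils.py paths
# above) logs 'acquired by ... :: waited N' on entry and
# '"released" by ... :: held N' on exit.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # placeholder for work such as ResourceTracker.instance_claim

# Context-manager form: logs the 'Acquiring lock' / 'Acquired lock' /
# 'Releasing lock' triplet seen around the refresh_cache-* and datastore locks.
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass  # placeholder critical section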
[ 580.263425] env[68674]: DEBUG nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 580.265115] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072def7f-9785-4464-8e0d-23878199fc06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.301566] env[68674]: DEBUG nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Received event network-changed-7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 580.301849] env[68674]: DEBUG nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Refreshing instance network info cache due to event network-changed-7c0ea59c-e774-45af-b163-a886f32640b1. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 580.302115] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Acquiring lock "refresh_cache-160d9aa2-048d-45a2-ab55-581c8721ac3b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.302288] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Acquired lock "refresh_cache-160d9aa2-048d-45a2-ab55-581c8721ac3b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 580.302463] env[68674]: DEBUG nova.network.neutron [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Refreshing network info cache for port 7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 580.319180] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e674921d-ad7a-425c-91fd-f9d56eedf034 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.546s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 580.420166] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239394, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.514009] env[68674]: DEBUG nova.network.neutron [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Successfully created port: 1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.617891] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f84071-cb2d-4f83-91e9-94b815c7fe08 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.640216] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959116e5-af3c-45a8-88e2-3a4c4a938c3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.643550] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525e1742-9ed2-ce4d-2ef2-648bf9fc369c, 'name': SearchDatastore_Task, 'duration_secs': 0.015326} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.644563] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Releasing lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 580.644563] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Instance network_info: |[{"id": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "address": "fa:16:3e:38:28:4f", "network": {"id": "ec06235b-8062-47c5-b224-7a61c2daddb4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-844083245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cda9cc02a4542dca3a0f16209eb4101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f6f4d8-6e", "ovs_interfaceid": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 580.645593] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:28:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '085fb0ff-9285-4f1d-a008-a14da4844357', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9f6f4d8-6ea5-4035-a9e6-ed1462036f63', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 580.653876] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Creating folder: Project (8cda9cc02a4542dca3a0f16209eb4101). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 580.654515] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43379bb2-178c-4d4e-bdee-a0a6b284b753 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.658293] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25f27aae-5df6-492a-8dd3-ae0ffa6390fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.686364] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52876801-c5b0-49b4-bfbc-a51de0c70568 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.690945] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 580.690945] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5218a0ff-53b8-29d3-7531-1dcbc62767bb" [ 580.690945] env[68674]: _type = "Task" [ 580.690945] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.698069] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Created folder: Project (8cda9cc02a4542dca3a0f16209eb4101) in parent group-v647377. [ 580.698069] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Creating folder: Instances. Parent ref: group-v647404. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 580.699679] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b16765-b734-49d3-9d3d-fc2aec8ee590 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.703564] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8dc6feb-ff3d-4a7a-b877-445dcc1e03ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.708082] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5218a0ff-53b8-29d3-7531-1dcbc62767bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.723990] env[68674]: DEBUG nova.compute.provider_tree [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.725644] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Created folder: Instances in parent group-v647404. [ 580.725869] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 580.726250] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 580.726456] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9321d10-0e54-4f19-96f8-585a2114beda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.748939] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 580.748939] env[68674]: value = "task-3239397" [ 580.748939] env[68674]: _type = "Task" [ 580.748939] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.758356] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239397, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.783962] env[68674]: INFO nova.compute.manager [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Took 21.11 seconds to build instance. 
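Each of the vCenter operations above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same oslo.vmware pattern: the driver invokes the SOAP method, receives a task reference such as task-3239397, and wait_for_task() then polls it, which is what produces the 'Waiting for the task', 'progress is N%' and 'completed successfully' entries. A rough sketch of that call pattern is below; the host, credentials, poll interval and vm_ref are stand-in values, not taken from this environment.

from oslo_vmware import api as vmware_api

# Stand-in connection settings; the real ones come from the [vmware] section
# of nova.conf. Constructing the session opens a vCenter connection, so this
# only runs against a reachable vCenter.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org',
    'administrator@vsphere.local',
    'not-a-real-password',
    api_retry_count=10,
    task_poll_interval=0.5)   # interval between the "progress is N%" polls

def power_on(vm_ref):
    # invoke_api() issues PowerOnVM_Task and returns a task reference;
    # wait_for_task() polls that task until it succeeds (raising on error),
    # producing the poll/progress log entries seen in this trace.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)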
[ 580.820339] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 580.923024] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239394, 'name': ReconfigVM_Task, 'duration_secs': 0.56943} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.925652] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 580.926426] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4301510-cd47-4a26-9070-dd067271454a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.935719] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 580.935719] env[68674]: value = "task-3239398" [ 580.935719] env[68674]: _type = "Task" [ 580.935719] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.945274] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239398, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.207016] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5218a0ff-53b8-29d3-7531-1dcbc62767bb, 'name': SearchDatastore_Task, 'duration_secs': 0.020828} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.207161] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.207858] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b/7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 581.207858] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5834315e-31f9-4fb8-ae0a-69b5a1c66a8c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.217141] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 581.217141] env[68674]: value = "task-3239399" [ 581.217141] env[68674]: _type = "Task" [ 581.217141] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.228527] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 581.231538] env[68674]: DEBUG nova.scheduler.client.report [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 581.234718] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239399, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.264020] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239397, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.271920] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 581.272744] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.273422] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 581.273422] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.273550] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 581.273670] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 581.273899] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 581.274213] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 581.275025] env[68674]: DEBUG 
nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 581.275181] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 581.275369] env[68674]: DEBUG nova.virt.hardware [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 581.276585] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3be69c-c806-440d-bb56-6be29fd9621e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.285557] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43462bf3-f981-48a9-beea-97e5e33c4ca7 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.619s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 581.288483] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978a1b22-0fe9-4191-a0f7-cf62a50c5fce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.347194] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.447872] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239398, 'name': Rename_Task, 'duration_secs': 0.197032} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.447872] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 581.447872] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66549dc3-f06c-4635-86a6-e25af76abc18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.455797] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 581.455797] env[68674]: value = "task-3239400" [ 581.455797] env[68674]: _type = "Task" [ 581.455797] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.465596] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.490095] env[68674]: DEBUG nova.network.neutron [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Updated VIF entry in instance network info cache for port 7c0ea59c-e774-45af-b163-a886f32640b1. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 581.490253] env[68674]: DEBUG nova.network.neutron [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Updating instance_info_cache with network_info: [{"id": "7c0ea59c-e774-45af-b163-a886f32640b1", "address": "fa:16:3e:3a:13:ce", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c0ea59c-e7", "ovs_interfaceid": "7c0ea59c-e774-45af-b163-a886f32640b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.592369] env[68674]: DEBUG nova.compute.manager [None req-b04ad53a-8d2f-4c6a-b25f-2835d0c0dba0 tempest-ServerDiagnosticsV248Test-735375133 tempest-ServerDiagnosticsV248Test-735375133-project-admin] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 581.595203] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb755bb-7348-44ff-a38f-dcef582d3e5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.605560] env[68674]: INFO nova.compute.manager [None req-b04ad53a-8d2f-4c6a-b25f-2835d0c0dba0 tempest-ServerDiagnosticsV248Test-735375133 tempest-ServerDiagnosticsV248Test-735375133-project-admin] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Retrieving diagnostics [ 581.606484] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d37c21-5712-4bd7-ba07-0afb021b601e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.730224] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239399, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.736669] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 581.737399] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 581.740265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.630s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.742623] env[68674]: INFO nova.compute.claims [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.762035] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239397, 'name': CreateVM_Task, 'duration_secs': 0.518342} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.762287] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 581.763146] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.763573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.765357] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 581.765357] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92609d2d-3d94-4a69-b71e-db03b6f5576c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.771574] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 581.771574] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f889a-2d2d-3cc3-5523-88d4baf67e8b" [ 581.771574] env[68674]: _type = "Task" [ 581.771574] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.786210] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f889a-2d2d-3cc3-5523-88d4baf67e8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.796038] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.971053] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239400, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.993475] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Releasing lock "refresh_cache-160d9aa2-048d-45a2-ab55-581c8721ac3b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.993573] env[68674]: DEBUG nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Received event network-vif-plugged-40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 581.993723] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Acquiring lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.993723] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.993723] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 581.994300] env[68674]: DEBUG nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] No waiting events found dispatching network-vif-plugged-40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 581.994463] env[68674]: WARNING nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Received unexpected event network-vif-plugged-40a38082-1691-4d4b-9fce-f07687409a92 for instance with vm_state building and task_state spawning. [ 581.994888] env[68674]: DEBUG nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Received event network-changed-40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 581.994888] env[68674]: DEBUG nova.compute.manager [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Refreshing instance network info cache due to event network-changed-40a38082-1691-4d4b-9fce-f07687409a92. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 581.995109] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Acquiring lock "refresh_cache-7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.995591] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Acquired lock "refresh_cache-7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.995591] env[68674]: DEBUG nova.network.neutron [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Refreshing network info cache for port 40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 582.231299] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239399, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627381} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.231299] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b/7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 582.231299] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 582.231599] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0338213-644b-42fb-8b4a-a1cc94319eb7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.241583] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 582.241583] env[68674]: value = "task-3239401" [ 582.241583] env[68674]: _type = "Task" [ 582.241583] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.247976] env[68674]: DEBUG nova.compute.utils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 582.255091] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 582.255668] env[68674]: DEBUG nova.network.neutron [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.269411] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239401, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.285891] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f889a-2d2d-3cc3-5523-88d4baf67e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.02446} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.286366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 582.286717] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 582.287224] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.287436] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 582.288061] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 582.288287] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef38ef5a-9dc0-42f6-8aa4-7b2f316b929c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.307318] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 582.307545] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 582.312056] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-291caee3-db27-4e22-8e13-56083d617e2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.322218] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 582.322218] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e0546e-1207-5802-7433-6829cf727d49" [ 582.322218] env[68674]: _type = "Task" [ 582.322218] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.334850] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e0546e-1207-5802-7433-6829cf727d49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.343535] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.410047] env[68674]: DEBUG nova.policy [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dc14d246a0b416fac0bbbf125f5d549', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f90a5acbb1c14d0480034fe257671d5a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 582.472981] env[68674]: DEBUG oslo_vmware.api [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239400, 'name': PowerOnVM_Task, 'duration_secs': 0.928468} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.475743] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 582.475836] env[68674]: DEBUG nova.compute.manager [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 582.476598] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdfd53e-bb92-49af-9bf3-fcbc1ddc0d32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.633915] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "ae945f3f-fde8-4b25-a5bd-81014fc99690" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.633915] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.754724] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094409} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.755049] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 582.756035] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cb1b5c-baab-48bd-90d8-5d767f1c0670 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.785268] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 582.804473] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b/7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 582.808014] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99bea9f6-7923-46a1-a091-357816b4dec0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.848232] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e0546e-1207-5802-7433-6829cf727d49, 'name': SearchDatastore_Task, 'duration_secs': 0.01802} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.848480] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 582.848480] env[68674]: value = "task-3239402" [ 582.848480] env[68674]: _type = "Task" [ 582.848480] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.851081] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74f88513-2a9f-41ec-92af-73f45811b6ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.863727] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 582.863727] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba228a-1278-d343-ad7e-a31c7da0a819" [ 582.863727] env[68674]: _type = "Task" [ 582.863727] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.867831] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239402, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.881456] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba228a-1278-d343-ad7e-a31c7da0a819, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.936138] env[68674]: DEBUG nova.network.neutron [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Successfully updated port: 1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 583.003624] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.239178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.239356] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.310160] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caa5655-e251-4c09-b440-34c42f02ed8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.326603] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c15297-e97b-46cb-bfe0-70b420404473 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.336491] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "3463e09e-dc2f-432c-9eff-8192c2616240" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.336846] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "3463e09e-dc2f-432c-9eff-8192c2616240" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.376171] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e792512-58f8-4a6f-9966-281eaff36488 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.386821] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.393310] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba228a-1278-d343-ad7e-a31c7da0a819, 'name': SearchDatastore_Task, 'duration_secs': 0.023783} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.393656] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.394292] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 503e9328-bbd8-414f-8bea-250ed8247d67/503e9328-bbd8-414f-8bea-250ed8247d67.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 583.395180] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646b8733-8525-4f6f-88a9-0b26b172cb3d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.400178] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84b53837-7bc5-4b65-90c0-9804e9a06424 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.416243] env[68674]: DEBUG nova.compute.provider_tree [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.419191] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 583.419191] env[68674]: value = "task-3239403" [ 583.419191] env[68674]: _type = "Task" [ 583.419191] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.430713] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.438053] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.438286] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.438460] env[68674]: DEBUG nova.network.neutron [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.522757] env[68674]: DEBUG nova.network.neutron [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Updated VIF entry in instance network info cache for port 40a38082-1691-4d4b-9fce-f07687409a92. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 583.522757] env[68674]: DEBUG nova.network.neutron [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Updating instance_info_cache with network_info: [{"id": "40a38082-1691-4d4b-9fce-f07687409a92", "address": "fa:16:3e:9b:97:af", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40a38082-16", "ovs_interfaceid": "40a38082-1691-4d4b-9fce-f07687409a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.816635] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 583.825556] env[68674]: DEBUG nova.network.neutron [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Successfully created port: 5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.844615] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 583.845343] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.845343] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 583.845343] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.845343] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 583.845591] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 583.845666] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 583.845814] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 583.845968] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 583.846196] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 583.846376] env[68674]: DEBUG nova.virt.hardware [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 583.847358] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039425c0-62fc-4740-8857-5764b9fef837 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.856649] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ff7d19-5c3f-4018-8177-b3cf87015da1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.882208] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239402, 'name': ReconfigVM_Task, 'duration_secs': 0.742118} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.882208] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b/7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 583.882833] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a05ee381-8b12-43ca-a054-1a5fb54c0148 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.892101] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 583.892101] env[68674]: value = "task-3239404" [ 583.892101] env[68674]: _type = "Task" [ 583.892101] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.902087] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239404, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.927953] env[68674]: DEBUG nova.scheduler.client.report [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 583.940139] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239403, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.011162] env[68674]: DEBUG nova.network.neutron [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.029556] env[68674]: DEBUG oslo_concurrency.lockutils [req-d8fb7267-ba8d-4f7a-8815-1e8f982b118c req-bf4fc95d-c537-4fb5-8152-53d5d3f6d9f9 service nova] Releasing lock "refresh_cache-7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.412406] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239404, 'name': Rename_Task, 'duration_secs': 0.232502} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.412406] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 584.412406] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b9005b4-3372-4f10-a56d-d430ecab602c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.422411] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 584.422411] env[68674]: value = "task-3239408" [ 584.422411] env[68674]: _type = "Task" [ 584.422411] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.441398] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.442129] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 584.448768] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.158s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.450351] env[68674]: INFO nova.compute.claims [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.454527] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.915559} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.454527] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 503e9328-bbd8-414f-8bea-250ed8247d67/503e9328-bbd8-414f-8bea-250ed8247d67.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 584.454749] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 584.455669] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e8aa1df-244e-4b85-b6e1-a5a9b7ad171e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.464888] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.478052] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 584.478052] env[68674]: value = "task-3239410" [ 584.478052] env[68674]: _type = "Task" [ 584.478052] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.487326] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239410, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.493880] env[68674]: DEBUG nova.network.neutron [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.589018] env[68674]: DEBUG nova.compute.manager [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 584.589018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522b240a-a90a-4871-8f63-c1778a227481 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.940565] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239408, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.960324] env[68674]: DEBUG nova.compute.utils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 584.962621] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 584.962925] env[68674]: DEBUG nova.network.neutron [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 584.991583] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131852} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.992234] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 584.993078] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30055eb3-876a-4380-9667-e15f26a30156 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.997517] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.997517] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Instance network_info: |[{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 584.997738] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bd735016-1d07-4442-b084-919056515fe2 
tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:37:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b7ed5a9-214f-4011-b73e-63954c02e25e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.012571] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating folder: Project (61ea6bfeb37d470a970e9c98e4827ade). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.014598] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab65fc95-2154-4e19-8e18-fac152a3df4c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.037568] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 503e9328-bbd8-414f-8bea-250ed8247d67/503e9328-bbd8-414f-8bea-250ed8247d67.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 585.039325] env[68674]: DEBUG nova.policy [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaf919eaf2c344da9bba87d62e4c4f3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c1c92eae8a804dda9dc372705034e8be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 585.041933] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a550f490-b5d3-4870-b331-0e1edc3fe735 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.058900] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created folder: Project (61ea6bfeb37d470a970e9c98e4827ade) in parent group-v647377. [ 585.059197] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating folder: Instances. Parent ref: group-v647410. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.059867] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed4f41f7-a4d3-4146-92de-b0e3fd2b4099 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.066879] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 585.066879] env[68674]: value = "task-3239412" [ 585.066879] env[68674]: _type = "Task" [ 585.066879] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.073314] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created folder: Instances in parent group-v647410. [ 585.073314] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 585.073702] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 585.074261] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-464cbb5e-7862-4b87-ab8a-25ccf5328e95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.108034] env[68674]: INFO nova.compute.manager [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] instance snapshotting [ 585.108174] env[68674]: WARNING nova.compute.manager [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 585.109803] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239412, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.111558] env[68674]: DEBUG nova.compute.manager [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Received event network-vif-plugged-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 585.111558] env[68674]: DEBUG oslo_concurrency.lockutils [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.111838] env[68674]: DEBUG oslo_concurrency.lockutils [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] Lock "503e9328-bbd8-414f-8bea-250ed8247d67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.112076] env[68674]: DEBUG oslo_concurrency.lockutils [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] Lock "503e9328-bbd8-414f-8bea-250ed8247d67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.112283] env[68674]: DEBUG nova.compute.manager [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] No waiting events found dispatching network-vif-plugged-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 585.112513] env[68674]: WARNING nova.compute.manager [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Received unexpected event network-vif-plugged-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 for instance with vm_state building and task_state spawning. [ 585.112860] env[68674]: DEBUG nova.compute.manager [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Received event network-changed-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 585.112975] env[68674]: DEBUG nova.compute.manager [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Refreshing instance network info cache due to event network-changed-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 585.113249] env[68674]: DEBUG oslo_concurrency.lockutils [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] Acquiring lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.113430] env[68674]: DEBUG oslo_concurrency.lockutils [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] Acquired lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.113685] env[68674]: DEBUG nova.network.neutron [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Refreshing network info cache for port b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 585.117525] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4429593b-4a65-4fea-9482-3a8d2e96f40c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.129646] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.129646] env[68674]: value = "task-3239414" [ 585.129646] env[68674]: _type = "Task" [ 585.129646] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.162088] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f36d940-c8c3-46f0-8f73-6a8f7ad1ff9e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.176155] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239414, 'name': CreateVM_Task} progress is 15%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.196246] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "627fb348-1749-4480-97b9-b479a182d4ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.196409] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "627fb348-1749-4480-97b9-b479a182d4ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.444021] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239408, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.467632] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 585.594914] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239412, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.671377] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239414, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.685389] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 585.686115] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-65e9e0cf-2fb2-4d85-8575-3c7059e92e63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.699956] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 585.699956] env[68674]: value = "task-3239415" [ 585.699956] env[68674]: _type = "Task" [ 585.699956] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.723242] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239415, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.948157] env[68674]: DEBUG oslo_vmware.api [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239408, 'name': PowerOnVM_Task, 'duration_secs': 1.292856} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.948662] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 585.949123] env[68674]: INFO nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Took 13.63 seconds to spawn the instance on the hypervisor. [ 585.949509] env[68674]: DEBUG nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 585.950636] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d65056-aa96-4b5b-bfac-8bbe336f47fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.071097] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ece50c6-2a84-4ff0-9191-2335aed54fbc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.090959] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f54f08f-ca53-41e7-be4c-f796a310f7b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.094476] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239412, 'name': ReconfigVM_Task, 'duration_secs': 0.567566} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.094777] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 503e9328-bbd8-414f-8bea-250ed8247d67/503e9328-bbd8-414f-8bea-250ed8247d67.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 586.095976] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4fe5300-d3fa-4435-8e92-cdd85ef0948a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.133689] env[68674]: DEBUG nova.network.neutron [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Successfully created port: 2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.139062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c812021a-4db7-4214-9425-e2335ad896f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.142144] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 586.142144] env[68674]: value = "task-3239416" [ 586.142144] env[68674]: _type = "Task" [ 586.142144] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.150510] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bec716c-7402-496c-a107-1b7d86639129 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.164409] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239416, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.175099] env[68674]: DEBUG nova.compute.provider_tree [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.183315] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239414, 'name': CreateVM_Task, 'duration_secs': 0.58018} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.183504] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 586.184209] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.184365] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.184696] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 586.184971] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f6543b0-d47a-4cdc-861b-3614d2358b29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.191074] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 586.191074] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5293558f-f7f2-4fd8-38d0-9972c13507f2" [ 586.191074] env[68674]: _type = "Task" [ 586.191074] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.202717] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5293558f-f7f2-4fd8-38d0-9972c13507f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.213861] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239415, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.337871] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "f9168b78-ed64-4109-84f0-db0af61d2f10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.337936] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.519138] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 586.530837] env[68674]: INFO nova.compute.manager [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Took 25.03 seconds to build instance. [ 586.563201] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 586.565176] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.565420] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.565594] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 
tempest-ServerAddressesTestJSON-1371612918-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.565783] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.565952] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 586.566219] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 586.566418] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 586.566597] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 586.566875] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 586.568035] env[68674]: DEBUG nova.virt.hardware [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 586.568035] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f95dad-05ee-4ef4-af29-8ca012d44cb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.580548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ea8ba3-b0f5-4c88-9a39-b9eb80c6bfcf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.654670] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239416, 'name': Rename_Task, 'duration_secs': 0.234845} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.654670] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 586.654670] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44ec585d-fff9-41e6-97af-28667fa7f87f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.663604] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 586.663604] env[68674]: value = "task-3239417" [ 586.663604] env[68674]: _type = "Task" [ 586.663604] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.678918] env[68674]: DEBUG nova.scheduler.client.report [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 586.689197] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239417, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.692330] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "a62237a7-a123-4378-b655-d489ef08474b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.692600] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "a62237a7-a123-4378-b655-d489ef08474b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.706300] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5293558f-f7f2-4fd8-38d0-9972c13507f2, 'name': SearchDatastore_Task, 'duration_secs': 0.018474} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.714263] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 586.714710] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.715073] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.715270] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.715508] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.718548] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64b068b4-1592-42d7-aa59-68f6ce61d9a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.732340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.732663] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.732931] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239415, 'name': CreateSnapshot_Task, 'duration_secs': 0.736049} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.734820] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 586.735633] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.735928] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 586.737386] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe7b081-e05d-460b-b442-dad46e8de1a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.741204] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4bcc7c0-e65c-4338-b465-45c9b8b03be5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.749168] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 586.749168] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a02a8-aa5e-e0ab-4482-2e1df1b79c24" [ 586.749168] env[68674]: _type = "Task" [ 586.749168] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.764411] env[68674]: DEBUG nova.network.neutron [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updated VIF entry in instance network info cache for port b9f6f4d8-6ea5-4035-a9e6-ed1462036f63. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 586.764879] env[68674]: DEBUG nova.network.neutron [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updating instance_info_cache with network_info: [{"id": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "address": "fa:16:3e:38:28:4f", "network": {"id": "ec06235b-8062-47c5-b224-7a61c2daddb4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-844083245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cda9cc02a4542dca3a0f16209eb4101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f6f4d8-6e", "ovs_interfaceid": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.766857] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "275cdfcc-06f0-4c29-b18b-55cde38480a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.767108] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.776469] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a02a8-aa5e-e0ab-4482-2e1df1b79c24, 'name': SearchDatastore_Task, 'duration_secs': 0.016232} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.778050] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbdf032b-5d8b-457c-bb24-41470fb8c251 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.786368] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 586.786368] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529506de-0781-6f0e-4657-5af156996b38" [ 586.786368] env[68674]: _type = "Task" [ 586.786368] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.798276] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529506de-0781-6f0e-4657-5af156996b38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.034087] env[68674]: DEBUG nova.network.neutron [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Successfully updated port: 5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.038028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcfc32ed-d38b-41e1-8e7b-b25e622f6474 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.551s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.179374] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239417, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.198593] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.198593] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 587.202358] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.498s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.206158] env[68674]: INFO nova.compute.claims [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.276290] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 587.277167] env[68674]: DEBUG oslo_concurrency.lockutils [req-a61a4b82-d3de-486f-9ec7-de2c7e5bee0f req-a22db474-de76-4d76-93ee-0d043ad8b1c9 service nova] Releasing lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.277505] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fbbcd7f8-a1e3-49ea-8955-10154e330233 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.299618] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 587.299618] env[68674]: value = "task-3239418" [ 587.299618] env[68674]: _type = "Task" [ 587.299618] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.306217] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529506de-0781-6f0e-4657-5af156996b38, 'name': SearchDatastore_Task, 'duration_secs': 0.014939} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.310265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.310483] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006/3d85c8c4-f09c-4f75-aff5-9a49d84ae006.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.311054] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d50b1274-1754-4e62-bb3b-891833be77ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.325855] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239418, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.331134] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 587.331134] env[68674]: value = "task-3239419" [ 587.331134] env[68674]: _type = "Task" [ 587.331134] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.343237] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239419, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.537275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.537275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquired lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 587.537275] env[68674]: DEBUG nova.network.neutron [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.542722] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 587.653031] env[68674]: INFO nova.compute.manager [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Rebuilding instance [ 587.678317] env[68674]: DEBUG oslo_vmware.api [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239417, 'name': PowerOnVM_Task, 'duration_secs': 0.781253} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.678636] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 587.679245] env[68674]: INFO nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Took 13.00 seconds to spawn the instance on the hypervisor. 
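The PowerOnVM_Task, ReconfigVM_Task and CreateVM_Task entries above all follow the same oslo.vmware shape: a vSphere method is invoked, a Task reference comes back immediately, and wait_for_task polls it (the recurring "_poll_task ... progress is N%" DEBUG records) until it completes or faults. A minimal sketch of that call pattern, assuming an already-created oslo_vmware.api.VMwareAPISession named session and a VM managed-object reference vm_ref (both placeholders, not values taken from this log):

from oslo_vmware import exceptions as vexc


def power_on(session, vm_ref):
    # Invoke the vSphere method; this returns a Task moref right away,
    # matching the "Invoking VirtualMachine.PowerOnVM_Task with opID=..." lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    try:
        # Block while oslo.vmware polls the task, producing the
        # "Task: {...} progress is N%" records, until it reports success.
        return session.wait_for_task(task)
    except vexc.VimFaultException:
        # On a task error the caller (here, Nova) would record an instance fault.
        raise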
[ 587.679245] env[68674]: DEBUG nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.681970] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fab0b8d-730c-4e8a-b3ca-095ba1a099cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.712183] env[68674]: DEBUG nova.compute.utils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 587.722411] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 587.722411] env[68674]: DEBUG nova.network.neutron [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 587.739654] env[68674]: DEBUG nova.compute.manager [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.741219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f25409-3f7e-4a9b-8ff5-84dfe21c28d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.808231] env[68674]: DEBUG nova.policy [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e12ae6d61f0147dbb760e5598d24a53e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc7acf9ab6ee4ce49cc6d971fa212411', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 587.828813] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239418, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.845662] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239419, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.084484] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.146379] env[68674]: DEBUG nova.network.neutron [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.222093] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 588.226907] env[68674]: INFO nova.compute.manager [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Took 25.67 seconds to build instance. [ 588.325090] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239418, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.347802] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.892186} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.348082] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006/3d85c8c4-f09c-4f75-aff5-9a49d84ae006.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 588.348302] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 588.348551] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5285ef0-5d19-44be-ad0a-f6e6b3b86a76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.368076] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 588.368076] env[68674]: value = "task-3239421" [ 588.368076] env[68674]: _type = "Task" [ 588.368076] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.388346] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239421, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.732729] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0c2d1e3e-f287-45dd-8566-79a0dadd8371 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.192s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.743961] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca008ba3-d582-40c8-b00e-2ad8415e5c75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.758341] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd59923e-997d-49d8-ab87-22e2ac4177cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.766653] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 588.791989] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-095bf6d5-c424-4ea8-a2d3-bb9b15c14588 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.795883] env[68674]: DEBUG nova.network.neutron [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Updating instance_info_cache with network_info: [{"id": "5da6718a-7772-42cf-869d-77f84c2984f9", "address": "fa:16:3e:ec:a5:54", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da6718a-77", "ovs_interfaceid": "5da6718a-7772-42cf-869d-77f84c2984f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.799081] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fce802-9288-4721-8909-4b97bf9ef9de {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.809625] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c413be1-254d-49a7-bf5d-5845ff285e72 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.817460] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 588.817460] env[68674]: value = "task-3239422" [ 588.817460] env[68674]: _type = "Task" [ 588.817460] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.837163] env[68674]: DEBUG nova.compute.provider_tree [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.842893] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239418, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.848734] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Received event network-vif-plugged-1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 588.849042] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.849212] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.849416] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.849594] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] No waiting events found dispatching network-vif-plugged-1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 588.849802] env[68674]: WARNING nova.compute.manager 
[req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Received unexpected event network-vif-plugged-1b7ed5a9-214f-4011-b73e-63954c02e25e for instance with vm_state building and task_state spawning. [ 588.849974] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Received event network-changed-1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 588.850138] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Refreshing instance network info cache due to event network-changed-1b7ed5a9-214f-4011-b73e-63954c02e25e. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 588.850357] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.850494] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.850649] env[68674]: DEBUG nova.network.neutron [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Refreshing network info cache for port 1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 588.853397] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.868260] env[68674]: DEBUG nova.compute.manager [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Received event network-changed-27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 588.868260] env[68674]: DEBUG nova.compute.manager [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Refreshing instance network info cache due to event network-changed-27bb7642-85fc-46b6-9ac7-8a3a6db3271a. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 588.868260] env[68674]: DEBUG oslo_concurrency.lockutils [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] Acquiring lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.869489] env[68674]: DEBUG oslo_concurrency.lockutils [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] Acquired lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.869795] env[68674]: DEBUG nova.network.neutron [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Refreshing network info cache for port 27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 588.885800] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239421, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120795} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.885800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 588.887508] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a8c10c-df1a-4d8a-8b1c-8cef3e479b32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.914320] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006/3d85c8c4-f09c-4f75-aff5-9a49d84ae006.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 588.915274] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e01275e-6ae9-4480-9d7e-5a3d1c8f1e62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.934675] env[68674]: DEBUG nova.network.neutron [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Successfully created port: 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.944595] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for 
the task: (returnval){ [ 588.944595] env[68674]: value = "task-3239423" [ 588.944595] env[68674]: _type = "Task" [ 588.944595] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.955708] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.243089] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 589.243921] env[68674]: DEBUG nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 589.292320] env[68674]: DEBUG nova.network.neutron [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Successfully updated port: 2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 589.302025] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 589.302025] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.302025] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 589.302585] env[68674]: DEBUG 
nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.302585] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 589.302585] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 589.302585] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 589.302585] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 589.302940] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 589.303318] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 589.303979] env[68674]: DEBUG nova.virt.hardware [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 589.304923] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Releasing lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.305294] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Instance network_info: |[{"id": "5da6718a-7772-42cf-869d-77f84c2984f9", 
"address": "fa:16:3e:ec:a5:54", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da6718a-77", "ovs_interfaceid": "5da6718a-7772-42cf-869d-77f84c2984f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 589.306801] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851f6e97-2146-4b9c-98e8-254a9c50e63d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.311206] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:a5:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5da6718a-7772-42cf-869d-77f84c2984f9', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.319866] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Creating folder: Project (f90a5acbb1c14d0480034fe257671d5a). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.320669] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a663d59-06fe-42a8-bcad-116ceedfe8fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.342492] env[68674]: DEBUG nova.scheduler.client.report [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 589.348465] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b025d241-e69a-42ef-bf40-f032aad2cf66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.362660] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239418, 'name': CloneVM_Task, 'duration_secs': 1.732928} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.362904] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Created folder: Project (f90a5acbb1c14d0480034fe257671d5a) in parent group-v647377. [ 589.363649] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Creating folder: Instances. Parent ref: group-v647415. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.368568] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Created linked-clone VM from snapshot [ 589.368835] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1af9e52-98e0-45a2-a051-6a127a2d754d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.370803] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239422, 'name': PowerOffVM_Task, 'duration_secs': 0.187041} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.373811] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ad4b55-1ebc-46da-869c-e4d21fed1a73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.377447] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 589.377720] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 589.383018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bc8659-f232-454d-8231-f77b70ccd00a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.401334] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Created folder: Instances in parent group-v647415. [ 589.401334] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 589.403329] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 589.403870] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Uploading image 69b64169-5076-4f13-b5fb-4be17d6e4788 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 589.408310] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25a276d1-7ecb-46f4-a3ec-8314094bb132 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.424870] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 589.425341] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9ee0595-fb81-485a-bfd1-dc43b4e49250 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.433658] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.433658] env[68674]: value = "task-3239427" [ 589.433658] env[68674]: _type = "Task" [ 589.433658] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.445429] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 589.445429] env[68674]: value = "vm-647414" [ 589.445429] env[68674]: _type = "VirtualMachine" [ 589.445429] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 589.445934] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c07524a7-fda1-45ca-9b43-353df253d3b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.456435] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239427, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.459948] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 589.460135] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 589.460318] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Deleting the datastore file [datastore2] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 589.464070] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc19bf9a-5cec-483c-a893-365c0f9aa3d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.466430] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239423, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.468085] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease: (returnval){ [ 589.468085] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f371ca-572b-1db2-e431-fbc95b954a70" [ 589.468085] env[68674]: _type = "HttpNfcLease" [ 589.468085] env[68674]: } obtained for exporting VM: (result){ [ 589.468085] env[68674]: value = "vm-647414" [ 589.468085] env[68674]: _type = "VirtualMachine" [ 589.468085] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 589.468377] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the lease: (returnval){ [ 589.468377] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f371ca-572b-1db2-e431-fbc95b954a70" [ 589.468377] env[68674]: _type = "HttpNfcLease" [ 589.468377] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 589.475653] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 589.475653] env[68674]: value = "task-3239429" [ 589.475653] env[68674]: _type = "Task" [ 589.475653] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.477746] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 589.477746] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f371ca-572b-1db2-e431-fbc95b954a70" [ 589.477746] env[68674]: _type = "HttpNfcLease" [ 589.477746] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 589.492482] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.783690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.796473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "refresh_cache-9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.796473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquired lock "refresh_cache-9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.797295] env[68674]: DEBUG nova.network.neutron [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.855683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 589.855888] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 589.859538] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.399s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 589.860693] env[68674]: INFO nova.compute.claims [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.948786] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239427, 'name': CreateVM_Task, 'duration_secs': 0.454235} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.953492] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 589.954603] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.954777] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.955123] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 589.955788] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49dbb2e5-3773-435d-8886-33737da8fac5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.961863] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239423, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.963082] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 589.963082] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f15f63-2eba-32cc-49cb-fea8f2f3ce38" [ 589.963082] env[68674]: _type = "Task" [ 589.963082] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.973435] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f15f63-2eba-32cc-49cb-fea8f2f3ce38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.982967] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 589.982967] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f371ca-572b-1db2-e431-fbc95b954a70" [ 589.982967] env[68674]: _type = "HttpNfcLease" [ 589.982967] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 589.983642] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 589.983642] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f371ca-572b-1db2-e431-fbc95b954a70" [ 589.983642] env[68674]: _type = "HttpNfcLease" [ 589.983642] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 589.984416] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb496e1-ab69-4774-94d2-228c770a28f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.991568] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169094} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.991568] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 589.991568] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 589.991568] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 589.998400] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc63f2-81e9-78ca-a366-4ecc0197c36c/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 589.998547] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc63f2-81e9-78ca-a366-4ecc0197c36c/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 590.096729] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7198984d-a85f-4d1a-96e5-371e68ed59a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.365926] env[68674]: DEBUG nova.compute.utils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 590.385865] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 590.386081] env[68674]: DEBUG nova.network.neutron [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.454911] env[68674]: DEBUG nova.network.neutron [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.478491] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239423, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.484824] env[68674]: DEBUG nova.policy [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3708546dea34437f9bcbd5504c346d33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e05a97545e94e8a9be8f382457d79b2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 590.491633] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f15f63-2eba-32cc-49cb-fea8f2f3ce38, 'name': SearchDatastore_Task, 'duration_secs': 0.016174} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.491939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.492352] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.492756] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.492994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 590.493215] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 590.493564] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a959772-1f7f-4024-94f0-7dbe952aeea7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.506430] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 590.506652] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 590.507803] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb96d1a4-273f-4917-8b38-871eb0145251 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.514548] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 590.514548] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5269e51f-f955-1535-464a-3528d0e10db8" [ 590.514548] env[68674]: _type = "Task" [ 590.514548] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.528994] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5269e51f-f955-1535-464a-3528d0e10db8, 'name': SearchDatastore_Task, 'duration_secs': 0.011069} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.531022] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68d33bc7-6248-41fe-9b2c-73a95ef312a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.538031] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 590.538031] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527c4547-f49f-04c9-4173-ab705319843a" [ 590.538031] env[68674]: _type = "Task" [ 590.538031] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.548762] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527c4547-f49f-04c9-4173-ab705319843a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.666935] env[68674]: DEBUG nova.network.neutron [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updated VIF entry in instance network info cache for port 27bb7642-85fc-46b6-9ac7-8a3a6db3271a. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 590.667330] env[68674]: DEBUG nova.network.neutron [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updating instance_info_cache with network_info: [{"id": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "address": "fa:16:3e:55:ec:a9", "network": {"id": "a8c05d42-7059-4056-96f9-f929e862948a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-2017205475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c18d4b8073be4aa59800f2c6f482fdad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4c5eb94-841c-4713-985a-8fc4117fbaf1", "external-id": "nsx-vlan-transportzone-425", "segmentation_id": 425, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb7642-85", "ovs_interfaceid": "27bb7642-85fc-46b6-9ac7-8a3a6db3271a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.690121] env[68674]: DEBUG nova.network.neutron [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updated VIF entry in instance network info cache for port 1b7ed5a9-214f-4011-b73e-63954c02e25e. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 590.690121] env[68674]: DEBUG nova.network.neutron [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.871228] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 590.964523] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239423, 'name': ReconfigVM_Task, 'duration_secs': 1.73357} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.965086] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006/3d85c8c4-f09c-4f75-aff5-9a49d84ae006.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 590.965776] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-988c46f2-da73-403e-ae31-89c4677e3341 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.985380] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 590.985380] env[68674]: value = "task-3239431" [ 590.985380] env[68674]: _type = "Task" [ 590.985380] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.995626] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239431, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.042145] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 591.042621] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.042798] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 591.042990] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.043162] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 591.043311] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 591.043534] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 591.043711] env[68674]: DEBUG nova.virt.hardware 
[None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 591.043911] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 591.044110] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 591.044376] env[68674]: DEBUG nova.virt.hardware [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 591.048792] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a715b31-1cb5-4186-ad14-27d3ae54f7f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.067135] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527c4547-f49f-04c9-4173-ab705319843a, 'name': SearchDatastore_Task, 'duration_secs': 0.011761} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.069039] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74f9234-7879-4381-9f87-6803e6a9f181 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.074911] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.075860] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] d1c7a508-7d45-4eff-bb06-b85bfe392772/d1c7a508-7d45-4eff-bb06-b85bfe392772.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 591.076296] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8c5dcc0-f9c5-4aa3-a029-88d3ae7d0744 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.080927] env[68674]: DEBUG nova.network.neutron [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Updating instance_info_cache with network_info: [{"id": "2bfad8ae-7c3e-4d21-9729-4f11937e5698", "address": "fa:16:3e:45:a0:e5", "network": {"id": "11312084-6643-4f7e-80a1-564aa531fc60", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2008787772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1c92eae8a804dda9dc372705034e8be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bfad8ae-7c", "ovs_interfaceid": "2bfad8ae-7c3e-4d21-9729-4f11937e5698", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.092667] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance VIF info [] {{(pid=68674) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.100383] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 591.105960] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.106378] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 591.106378] env[68674]: value = "task-3239432" [ 591.106378] env[68674]: _type = "Task" [ 591.106378] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.107507] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c45e41c9-9fe6-430e-8aae-cf304af237b4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.143938] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.144140] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.144140] env[68674]: value = "task-3239433" [ 591.144140] env[68674]: _type = "Task" [ 591.144140] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.161901] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239433, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.170748] env[68674]: DEBUG oslo_concurrency.lockutils [req-b40cb8e2-9a18-45d2-828b-2c879aa239e0 req-d7828bdb-cc72-46ee-afc5-b34c817eb2ce service nova] Releasing lock "refresh_cache-58830b0e-dbf3-424d-8b9a-bb298b6bea21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.192732] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.196489] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Received event network-vif-plugged-5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 591.196726] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Acquiring lock "d1c7a508-7d45-4eff-bb06-b85bfe392772-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.196937] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.197127] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 591.197333] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] No waiting events found dispatching network-vif-plugged-5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 591.197714] env[68674]: WARNING nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Received unexpected event network-vif-plugged-5da6718a-7772-42cf-869d-77f84c2984f9 for instance with vm_state building and task_state spawning. 
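[editor's note] The records above and below repeatedly show the oslo.vmware task-wait cycle: a vCenter task is started, "Waiting for the task: (returnval){ value = "task-..." }" is logged, _poll_task reports progress percentages, and the task finally "completed successfully" with a duration. The following is a minimal illustrative sketch (not code from this deployment) of that pattern using the public oslo.vmware API; the vCenter endpoint, credentials, and datastore paths are placeholders, not values taken from this log.

    # Sketch only: the wait_for_task pattern behind the "progress is N%" records above.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',            # hypothetical host
        'user', 'password',                # hypothetical credentials
        10,                                # api_retry_count
        0.5,                               # task_poll_interval (seconds)
        port=443)

    # Start a long-running vCenter operation (here a virtual disk copy, as in the
    # CopyVirtualDisk_Task records above) and block until it finishes; wait_for_task
    # polls the task object and emits the progress/completed log lines seen here.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName='[datastore1] src/src.vmdk',
                              destName='[datastore1] dst/dst.vmdk')
    task_info = session.wait_for_task(task)   # returns once state is 'success'
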
[ 591.198193] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Received event network-changed-5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 591.198193] env[68674]: DEBUG nova.compute.manager [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Refreshing instance network info cache due to event network-changed-5da6718a-7772-42cf-869d-77f84c2984f9. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 591.198408] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Acquiring lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.198649] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Acquired lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.198706] env[68674]: DEBUG nova.network.neutron [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Refreshing network info cache for port 5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 591.501525] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239431, 'name': Rename_Task, 'duration_secs': 0.223383} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.501525] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 591.501840] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f859fb42-b074-41d0-8ebb-4f1b3e8886db {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.512394] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f4aa9e-730b-44b7-84f7-fbaacbfef323 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.515684] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 591.515684] env[68674]: value = "task-3239434" [ 591.515684] env[68674]: _type = "Task" [ 591.515684] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.525429] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82282f44-9596-4704-840d-a939e0071d0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.532697] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239434, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.567099] env[68674]: DEBUG nova.network.neutron [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Successfully created port: 21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.572451] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d95542d-e072-4a30-b32f-393245ae557a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.579927] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16caccd3-f603-41df-9d06-c039abfdd69b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.585542] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Releasing lock "refresh_cache-9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.586128] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Instance network_info: |[{"id": "2bfad8ae-7c3e-4d21-9729-4f11937e5698", "address": "fa:16:3e:45:a0:e5", "network": {"id": "11312084-6643-4f7e-80a1-564aa531fc60", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2008787772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1c92eae8a804dda9dc372705034e8be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bfad8ae-7c", "ovs_interfaceid": "2bfad8ae-7c3e-4d21-9729-4f11937e5698", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 591.586586] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:a0:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bfad8ae-7c3e-4d21-9729-4f11937e5698', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.596180] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Creating folder: Project (c1c92eae8a804dda9dc372705034e8be). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.597203] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4cf6bb7-613a-4c4e-8207-e772da825e44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.610417] env[68674]: DEBUG nova.compute.provider_tree [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.613688] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Created folder: Project (c1c92eae8a804dda9dc372705034e8be) in parent group-v647377. [ 591.613992] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Creating folder: Instances. Parent ref: group-v647419. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.614640] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b020894-e5c9-4a05-9140-3de584b709f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.631308] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Created folder: Instances in parent group-v647419. [ 591.631680] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 591.632485] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.632847] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31e98cf8-c46e-4b20-8c31-e8c3f58f2bb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.655564] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533366} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.659626] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] d1c7a508-7d45-4eff-bb06-b85bfe392772/d1c7a508-7d45-4eff-bb06-b85bfe392772.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 591.659931] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 591.660592] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93a23ccc-fe43-4c55-809d-9a9625d06802 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.671125] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.671125] env[68674]: value = "task-3239438" [ 591.671125] env[68674]: _type = "Task" [ 591.671125] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.672795] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239433, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.678056] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 591.678056] env[68674]: value = "task-3239439" [ 591.678056] env[68674]: _type = "Task" [ 591.678056] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.686231] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239438, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.689879] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.886310] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 591.926635] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:07:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='745021413',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1931930460',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 591.927038] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.927435] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 591.927833] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.928323] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 591.928507] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 591.928751] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 591.928933] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 591.929133] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 591.929524] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 591.929742] env[68674]: DEBUG nova.virt.hardware [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 591.930694] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843fd69b-c25d-477e-a477-09eeb9844791 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.944554] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396f5748-8c0d-4a0a-b07b-3d7f42eda375 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.992984] env[68674]: DEBUG nova.compute.manager [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Received event network-vif-plugged-2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 591.993673] env[68674]: DEBUG oslo_concurrency.lockutils [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] Acquiring lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.993924] env[68674]: DEBUG oslo_concurrency.lockutils [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.994184] env[68674]: DEBUG oslo_concurrency.lockutils [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 591.994361] env[68674]: DEBUG nova.compute.manager [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] No waiting events found dispatching network-vif-plugged-2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 591.994656] env[68674]: WARNING nova.compute.manager [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Received unexpected event network-vif-plugged-2bfad8ae-7c3e-4d21-9729-4f11937e5698 for instance with vm_state building and task_state spawning. [ 591.994835] env[68674]: DEBUG nova.compute.manager [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Received event network-changed-2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 591.995104] env[68674]: DEBUG nova.compute.manager [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Refreshing instance network info cache due to event network-changed-2bfad8ae-7c3e-4d21-9729-4f11937e5698. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 591.995550] env[68674]: DEBUG oslo_concurrency.lockutils [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] Acquiring lock "refresh_cache-9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.995804] env[68674]: DEBUG oslo_concurrency.lockutils [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] Acquired lock "refresh_cache-9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.996078] env[68674]: DEBUG nova.network.neutron [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Refreshing network info cache for port 2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 592.031731] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239434, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.116383] env[68674]: DEBUG nova.scheduler.client.report [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.170518] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239433, 'name': CreateVM_Task, 'duration_secs': 0.616106} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.170518] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 592.170518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.170518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.170518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 592.170518] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6509e935-5b5c-4cb1-8478-7c65a2d0efa3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.185655] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 592.185655] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b201ba-6b28-7ea7-30b6-9bf2d5113e7c" [ 592.185655] env[68674]: _type = "Task" [ 592.185655] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.189646] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239438, 'name': CreateVM_Task, 'duration_secs': 0.430549} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.194022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 592.198480] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.198823] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082548} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.199992] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 592.200663] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8ef30d-ac38-4a78-b985-f70c014ff5c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.207518] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b201ba-6b28-7ea7-30b6-9bf2d5113e7c, 'name': SearchDatastore_Task, 'duration_secs': 0.012307} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.208297] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.208721] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.209064] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.209302] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.209574] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.209923] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.210507] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 592.210689] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76b9d050-33a9-4f49-92ce-c5fe36475cff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.225173] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db9119aa-36b5-4f63-8a75-2144a4280fc2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.237809] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] d1c7a508-7d45-4eff-bb06-b85bfe392772/d1c7a508-7d45-4eff-bb06-b85bfe392772.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 592.238806] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6276adc9-113b-49db-8b8d-0c98f5764bbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.258045] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.259054] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.260537] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4024f04-5f08-46de-a768-48c7044d30ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.265593] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 592.265593] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520797c5-f230-a74c-403b-b66131919034" [ 592.265593] env[68674]: _type = "Task" [ 592.265593] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.268014] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 592.268014] env[68674]: value = "task-3239440" [ 592.268014] env[68674]: _type = "Task" [ 592.268014] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.277597] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 592.277597] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52220e5d-3fc2-52c5-a0e0-6dd6f5b6256e" [ 592.277597] env[68674]: _type = "Task" [ 592.277597] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.290027] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239440, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.290488] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520797c5-f230-a74c-403b-b66131919034, 'name': SearchDatastore_Task, 'duration_secs': 0.012224} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.291490] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.291864] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.292291] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.300020] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52220e5d-3fc2-52c5-a0e0-6dd6f5b6256e, 'name': SearchDatastore_Task, 'duration_secs': 0.011944} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.301204] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5f5f9b4-ca1a-4b29-ae1a-b3bad02c54d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.309724] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 592.309724] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52889a07-57d8-65d6-0737-965c02b6a260" [ 592.309724] env[68674]: _type = "Task" [ 592.309724] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.321937] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52889a07-57d8-65d6-0737-965c02b6a260, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.370795] env[68674]: DEBUG nova.network.neutron [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Successfully updated port: 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.507528] env[68674]: DEBUG nova.network.neutron [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Updated VIF entry in instance network info cache for port 5da6718a-7772-42cf-869d-77f84c2984f9. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 592.507973] env[68674]: DEBUG nova.network.neutron [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Updating instance_info_cache with network_info: [{"id": "5da6718a-7772-42cf-869d-77f84c2984f9", "address": "fa:16:3e:ec:a5:54", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da6718a-77", "ovs_interfaceid": "5da6718a-7772-42cf-869d-77f84c2984f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.545877] env[68674]: DEBUG oslo_vmware.api [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239434, 'name': PowerOnVM_Task, 'duration_secs': 0.849749} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.547023] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 592.547443] env[68674]: INFO nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 11.32 seconds to spawn the instance on the hypervisor. 
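[editor's note] The surrounding records also show oslo.concurrency lock traffic: 'Acquiring lock "..."', 'Lock "..." acquired by ... :: waited N s', and 'Lock "..." "released" by ... :: held N s' (e.g. the refresh_cache-<uuid>, image-cache, and "compute_resources" locks). Below is a minimal illustrative sketch of the lockutils primitives that produce those lines; the lock names and function are placeholders for illustration, not code from Nova itself.

    # Sketch only: the named-lock pattern behind the Acquiring/acquired/released records.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('refresh_cache-example-instance-uuid', external=False)
    def refresh_network_cache():
        # Only one greenthread holds this named lock at a time, which is what
        # serializes the instance_info_cache updates logged above.
        pass

    # The same primitive is available as a context manager, e.g. the
    # "compute_resources" lock held around ResourceTracker.instance_claim:
    with lockutils.lock('compute_resources'):
        pass  # critical section
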
[ 592.547494] env[68674]: DEBUG nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 592.548496] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9ee24b-dc90-4f27-9cc0-78c835a2531a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.626264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.626410] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 592.629335] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.283s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.631280] env[68674]: INFO nova.compute.claims [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.792785] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239440, 'name': ReconfigVM_Task, 'duration_secs': 0.410678} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.793892] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Reconfigured VM instance instance-0000000d to attach disk [datastore1] d1c7a508-7d45-4eff-bb06-b85bfe392772/d1c7a508-7d45-4eff-bb06-b85bfe392772.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.794276] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2f5c37c-7f10-49e6-b239-557dd8da1964 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.804357] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 592.804357] env[68674]: value = "task-3239441" [ 592.804357] env[68674]: _type = "Task" [ 592.804357] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.822015] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239441, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.827940] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52889a07-57d8-65d6-0737-965c02b6a260, 'name': SearchDatastore_Task, 'duration_secs': 0.013011} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.828324] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.828606] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.828884] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.829074] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.829281] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-440e028f-9f2d-4875-8581-654e6f621550 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.832353] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e93cfbfa-762f-4755-88b9-23f1a4cb6c5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.844111] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 592.844111] env[68674]: value = "task-3239442" [ 592.844111] env[68674]: _type = "Task" [ 592.844111] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.847598] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.847598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.851487] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1892b8a0-abdd-45bf-8d5c-9452432e8ec7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.860238] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 592.860238] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e6f189-23e6-0c1e-5797-4e953c158b39" [ 592.860238] env[68674]: _type = "Task" [ 592.860238] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.869934] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.876247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.876247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.876247] env[68674]: DEBUG nova.network.neutron [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 592.885044] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e6f189-23e6-0c1e-5797-4e953c158b39, 'name': SearchDatastore_Task, 'duration_secs': 0.012081} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.885929] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98d7ada-bcab-497c-9442-5988b4d64f02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.894561] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 592.894561] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231ecf5-c6a1-6fe2-5b1f-076e65a2f0ba" [ 592.894561] env[68674]: _type = "Task" [ 592.894561] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.905458] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231ecf5-c6a1-6fe2-5b1f-076e65a2f0ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.019608] env[68674]: DEBUG nova.network.neutron [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Updated VIF entry in instance network info cache for port 2bfad8ae-7c3e-4d21-9729-4f11937e5698. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 593.020173] env[68674]: DEBUG nova.network.neutron [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Updating instance_info_cache with network_info: [{"id": "2bfad8ae-7c3e-4d21-9729-4f11937e5698", "address": "fa:16:3e:45:a0:e5", "network": {"id": "11312084-6643-4f7e-80a1-564aa531fc60", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2008787772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1c92eae8a804dda9dc372705034e8be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bfad8ae-7c", "ovs_interfaceid": "2bfad8ae-7c3e-4d21-9729-4f11937e5698", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.022128] env[68674]: DEBUG oslo_concurrency.lockutils [req-3b36efb2-fc52-499d-b84e-58b417a12adc req-47a918fd-92e5-462a-9ee1-c82ee9af4ae4 service nova] Releasing lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.085676] env[68674]: INFO nova.compute.manager [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 25.37 seconds to build instance. [ 593.145666] env[68674]: DEBUG nova.compute.utils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.148710] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 593.148710] env[68674]: DEBUG nova.network.neutron [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.245904] env[68674]: DEBUG nova.policy [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcdb66599bea45219bbf9401434e9024', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5be31196e1f452e8768b57c105d1765', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 593.326523] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239441, 'name': Rename_Task, 'duration_secs': 0.204488} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.326961] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 593.327140] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4e06cf8-6482-454d-9366-4ddc3291306b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.340067] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 593.340067] env[68674]: value = "task-3239443" [ 593.340067] env[68674]: _type = "Task" [ 593.340067] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.360445] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.364666] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239442, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.411360] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231ecf5-c6a1-6fe2-5b1f-076e65a2f0ba, 'name': SearchDatastore_Task, 'duration_secs': 0.012379} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.411678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.411897] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d/9e337960-78c1-4ddb-a6f6-d6fd57dbf86d.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 593.413201] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d68b755-2f25-4ec9-a4e1-3561425ece77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.427073] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 593.427073] env[68674]: value = "task-3239444" [ 593.427073] env[68674]: _type = "Task" [ 593.427073] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.437472] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.494637] env[68674]: DEBUG nova.network.neutron [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.531531] env[68674]: DEBUG oslo_concurrency.lockutils [req-42dba884-c52e-49d4-aef4-853a20293fa3 req-241c79b1-2196-4a8a-b083-2841e473e745 service nova] Releasing lock "refresh_cache-9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.589356] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bd735016-1d07-4442-b084-919056515fe2 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.888s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.652058] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 593.749892] env[68674]: DEBUG nova.compute.manager [None req-206b5ddd-a0de-40b2-bd67-93985469f280 tempest-ServerDiagnosticsV248Test-735375133 tempest-ServerDiagnosticsV248Test-735375133-project-admin] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 593.752976] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed64988-275f-4cbb-a6ad-488ae6a5cd24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.761158] env[68674]: INFO nova.compute.manager [None req-206b5ddd-a0de-40b2-bd67-93985469f280 tempest-ServerDiagnosticsV248Test-735375133 tempest-ServerDiagnosticsV248Test-735375133-project-admin] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Retrieving diagnostics [ 593.765073] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0afa6b-2982-4a19-b75a-47a1b3b8e4ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.860916] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239443, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.874626] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561025} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.875649] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 593.875649] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 593.875914] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-990e190b-d71b-4d61-859a-4aa8cede0e1b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.887296] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 593.887296] env[68674]: value = "task-3239446" [ 593.887296] env[68674]: _type = "Task" [ 593.887296] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.905892] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239446, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.940977] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239444, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.993959] env[68674]: DEBUG nova.network.neutron [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.093346] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.305319] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f5f23f-a674-44b3-a34f-eeedf8ce7f53 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.318604] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1a8969-de35-4fb4-9d43-02b5cb895c2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.356604] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95735514-0e77-42f3-b50d-17025fd1ff52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.367740] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239443, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.371642] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87331c14-d866-4209-8279-ed2bbdb5540a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.388030] env[68674]: DEBUG nova.compute.provider_tree [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.393971] env[68674]: DEBUG nova.network.neutron [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Successfully created port: 00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.402887] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109693} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.403197] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.404038] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb4ed78-9713-4066-a6e7-913158bd3b46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.429370] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 594.430306] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0b6588a-d3f5-4883-837d-6cd828d064d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.459057] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.672281} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.460140] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d/9e337960-78c1-4ddb-a6f6-d6fd57dbf86d.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 594.460397] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.460720] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 594.460720] env[68674]: value = "task-3239447" [ 594.460720] env[68674]: _type = "Task" [ 594.460720] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.461290] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d4e2177-6fb9-4b0d-a319-af75ab6a7251 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.477201] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239447, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.477201] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 594.477201] env[68674]: value = "task-3239448" [ 594.477201] env[68674]: _type = "Task" [ 594.477201] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.485422] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239448, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.499565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Releasing lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.500349] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Instance network_info: |[{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 594.500905] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:19:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbd7899c-c96e-47fc-9141-5803b646917a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f848177-8140-4862-a7f0-f901b045c157', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.511148] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Creating folder: Project (cc7acf9ab6ee4ce49cc6d971fa212411). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.512554] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70a5af3e-a021-4846-870a-1631e685d450 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.526391] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Created folder: Project (cc7acf9ab6ee4ce49cc6d971fa212411) in parent group-v647377. [ 594.526391] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Creating folder: Instances. Parent ref: group-v647423. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.526391] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c13e4bcd-ac4f-4485-983c-4159df860718 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.541388] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Created folder: Instances in parent group-v647423. [ 594.542061] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 594.542785] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 594.542785] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98f8b83e-e37d-4d4c-bc97-dc41dbd0797b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.568908] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.568908] env[68674]: value = "task-3239451" [ 594.568908] env[68674]: _type = "Task" [ 594.568908] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.580379] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239451, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.621594] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.664257] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 594.706516] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 594.706830] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.706979] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 594.708958] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.709667] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 594.709667] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 594.709667] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 594.710022] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 594.710022] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 594.710746] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 594.710746] env[68674]: DEBUG nova.virt.hardware [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 594.711344] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652782b5-e7b0-4905-984d-354473dbd928 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.729234] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60448b8-7aab-451f-a853-57f4f7cc8048 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.864879] env[68674]: DEBUG oslo_vmware.api [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239443, 'name': PowerOnVM_Task, 'duration_secs': 1.475674} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.865273] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 594.865567] env[68674]: INFO nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Took 11.05 seconds to spawn the instance on the hypervisor. [ 594.865804] env[68674]: DEBUG nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 594.866782] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e75d5c-697f-45fa-89f2-b6f45da14a6e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.897989] env[68674]: DEBUG nova.scheduler.client.report [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 594.975534] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239447, 'name': ReconfigVM_Task, 'duration_secs': 0.437206} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.975956] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5/89ccc16e-d0e5-4f7d-985c-8693188e7ed5.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 594.977018] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81cd3a5f-57c7-4a0a-bd5e-07765c5903d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.991879] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121534} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.994226] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.994226] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 594.994226] env[68674]: value = "task-3239452" [ 594.994226] env[68674]: _type = "Task" [ 594.994226] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.994881] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335bfef3-b2e8-4f8f-b6bb-341813695efb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.009560] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239452, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.029836] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d/9e337960-78c1-4ddb-a6f6-d6fd57dbf86d.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.030166] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-537fd004-1378-4c60-9b2e-607febaa9b51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.055639] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 595.055639] env[68674]: value = "task-3239453" [ 595.055639] env[68674]: _type = "Task" [ 595.055639] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.067921] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.082315] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239451, 'name': CreateVM_Task, 'duration_secs': 0.456247} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.082315] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 595.082315] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.082315] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.085254] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 595.085254] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7d5df7a-cb9b-4aa7-a80d-e888c935140f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.090882] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 595.090882] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ef96b-eccd-32a9-6b3b-c4ffaced46fa" [ 595.090882] env[68674]: _type = "Task" [ 595.090882] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.103381] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ef96b-eccd-32a9-6b3b-c4ffaced46fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.249769] env[68674]: DEBUG nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-vif-plugged-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 595.250138] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Acquiring lock "357b515d-ef37-4688-969e-f894be30edb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.250394] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Lock "357b515d-ef37-4688-969e-f894be30edb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.250590] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Lock "357b515d-ef37-4688-969e-f894be30edb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.250767] env[68674]: DEBUG nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] No waiting events found dispatching network-vif-plugged-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.251862] env[68674]: WARNING nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received unexpected event network-vif-plugged-4f848177-8140-4862-a7f0-f901b045c157 for instance with vm_state building and task_state spawning. [ 595.251862] env[68674]: DEBUG nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Received event network-changed-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 595.251862] env[68674]: DEBUG nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Refreshing instance network info cache due to event network-changed-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 595.251862] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Acquiring lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.251862] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Acquired lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.252274] env[68674]: DEBUG nova.network.neutron [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Refreshing network info cache for port b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.390285] env[68674]: INFO nova.compute.manager [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Took 23.16 seconds to build instance. [ 595.403382] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.404081] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.406895] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.063s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.408411] env[68674]: INFO nova.compute.claims [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.509160] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239452, 'name': Rename_Task, 'duration_secs': 0.178805} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.509160] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 595.509376] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-242412d3-e98d-4c28-b608-f48c3b53d85a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.518837] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Waiting for the task: (returnval){ [ 595.518837] env[68674]: value = "task-3239454" [ 595.518837] env[68674]: _type = "Task" [ 595.518837] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.531516] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.571447] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239453, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.605466] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ef96b-eccd-32a9-6b3b-c4ffaced46fa, 'name': SearchDatastore_Task, 'duration_secs': 0.013163} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.606486] env[68674]: DEBUG nova.network.neutron [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Successfully updated port: 21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 595.608045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.608045] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.608239] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.608451] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.608658] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.609476] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d08c118-186e-441d-8a0c-fb8760c099c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.622423] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.622423] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 595.626115] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b76fd501-2cc7-419d-a359-bce5186d7ba1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.630469] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 595.630469] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0cc3d-772c-a6e9-1936-054adc65a80b" [ 595.630469] env[68674]: _type = "Task" [ 595.630469] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.642544] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0cc3d-772c-a6e9-1936-054adc65a80b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.744305] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "f45200cd-6cb0-498a-8858-1e70177031d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.744489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "f45200cd-6cb0-498a-8858-1e70177031d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.812442] env[68674]: DEBUG nova.compute.manager [req-34dd31d0-fe06-49f8-89f2-637d639f1ec7 req-2c214752-d358-4474-8528-245a7c59521a service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Received event network-vif-plugged-21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 595.812923] env[68674]: DEBUG oslo_concurrency.lockutils [req-34dd31d0-fe06-49f8-89f2-637d639f1ec7 req-2c214752-d358-4474-8528-245a7c59521a service nova] Acquiring lock "50bb7509-b7e9-4dc3-9746-acd46010cc26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.812923] env[68674]: DEBUG oslo_concurrency.lockutils [req-34dd31d0-fe06-49f8-89f2-637d639f1ec7 req-2c214752-d358-4474-8528-245a7c59521a service nova] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.813116] env[68674]: DEBUG oslo_concurrency.lockutils 
[req-34dd31d0-fe06-49f8-89f2-637d639f1ec7 req-2c214752-d358-4474-8528-245a7c59521a service nova] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.815123] env[68674]: DEBUG nova.compute.manager [req-34dd31d0-fe06-49f8-89f2-637d639f1ec7 req-2c214752-d358-4474-8528-245a7c59521a service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] No waiting events found dispatching network-vif-plugged-21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.815123] env[68674]: WARNING nova.compute.manager [req-34dd31d0-fe06-49f8-89f2-637d639f1ec7 req-2c214752-d358-4474-8528-245a7c59521a service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Received unexpected event network-vif-plugged-21aba0b8-ff69-4bec-829d-29a8f6941877 for instance with vm_state building and task_state spawning. [ 595.892041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55c65656-e206-4f68-a98f-a57824d7a71a tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.674s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.913318] env[68674]: DEBUG nova.compute.utils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 595.919025] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 595.919025] env[68674]: DEBUG nova.network.neutron [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.008185] env[68674]: DEBUG nova.policy [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50e46e8c9fbc4778b5f89359ae81bfa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6b179855b874365964446f95f9f5a53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.031039] env[68674]: DEBUG oslo_vmware.api [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Task: {'id': task-3239454, 'name': PowerOnVM_Task, 'duration_secs': 0.491196} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.031399] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 596.032840] env[68674]: DEBUG nova.compute.manager [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 596.034055] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9897172-45cb-492b-8bd8-6144bdda8984 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.074449] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239453, 'name': ReconfigVM_Task, 'duration_secs': 0.533983} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.075184] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d/9e337960-78c1-4ddb-a6f6-d6fd57dbf86d.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.075955] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62ae6191-87f8-480d-9833-95d8e0411831 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.079341] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.079655] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.079994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.080247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.080530] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.083309] env[68674]: INFO nova.compute.manager [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Terminating instance [ 596.089260] env[68674]: DEBUG oslo_vmware.api [None 
req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 596.089260] env[68674]: value = "task-3239455" [ 596.089260] env[68674]: _type = "Task" [ 596.089260] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.103015] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239455, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.110022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.110217] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.110382] env[68674]: DEBUG nova.network.neutron [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.143188] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0cc3d-772c-a6e9-1936-054adc65a80b, 'name': SearchDatastore_Task, 'duration_secs': 0.017824} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.144134] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7202b816-4c51-4b41-bf4d-d865112c2c19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.151277] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 596.151277] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8a28f-17d9-a2ba-e299-0ea7e4a51d04" [ 596.151277] env[68674]: _type = "Task" [ 596.151277] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.164712] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8a28f-17d9-a2ba-e299-0ea7e4a51d04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.245729] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.246666] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.247198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.248643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.248898] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.252857] env[68674]: INFO nova.compute.manager [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Terminating instance [ 596.395472] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 
627fb348-1749-4480-97b9-b479a182d4ee] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 596.419583] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 596.518494] env[68674]: DEBUG nova.network.neutron [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updated VIF entry in instance network info cache for port b9f6f4d8-6ea5-4035-a9e6-ed1462036f63. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.519204] env[68674]: DEBUG nova.network.neutron [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updating instance_info_cache with network_info: [{"id": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "address": "fa:16:3e:38:28:4f", "network": {"id": "ec06235b-8062-47c5-b224-7a61c2daddb4", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-844083245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cda9cc02a4542dca3a0f16209eb4101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "085fb0ff-9285-4f1d-a008-a14da4844357", "external-id": "nsx-vlan-transportzone-729", "segmentation_id": 729, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f6f4d8-6e", "ovs_interfaceid": "b9f6f4d8-6ea5-4035-a9e6-ed1462036f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.559723] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.587725] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "refresh_cache-a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.588019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquired lock 
"refresh_cache-a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.588346] env[68674]: DEBUG nova.network.neutron [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.603074] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239455, 'name': Rename_Task, 'duration_secs': 0.253878} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.603074] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 596.604268] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61639f21-1803-490c-ab97-8bc22fd70ba6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.617906] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 596.617906] env[68674]: value = "task-3239456" [ 596.617906] env[68674]: _type = "Task" [ 596.617906] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.631265] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.663684] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8a28f-17d9-a2ba-e299-0ea7e4a51d04, 'name': SearchDatastore_Task, 'duration_secs': 0.026606} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.664015] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.664359] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 357b515d-ef37-4688-969e-f894be30edb7/357b515d-ef37-4688-969e-f894be30edb7.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.664685] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed70802a-de7f-4d74-ac13-41c42b8d413a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.679145] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 596.679145] env[68674]: value = "task-3239457" [ 596.679145] env[68674]: _type = "Task" [ 596.679145] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.691676] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239457, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.712027] env[68674]: DEBUG nova.network.neutron [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.760750] env[68674]: DEBUG nova.compute.manager [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 596.761552] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 596.762477] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdd5dab-6999-4302-8dd0-b6d0701ebc19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.776875] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 596.780223] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bd4cb3c-312e-444d-87c7-e906374db828 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.792732] env[68674]: DEBUG nova.compute.manager [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Received event network-changed {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 596.793336] env[68674]: DEBUG nova.compute.manager [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Refreshing instance network info cache due to event network-changed. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 596.793774] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] Acquiring lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.793774] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] Acquired lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.794042] env[68674]: DEBUG nova.network.neutron [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.800294] env[68674]: DEBUG oslo_vmware.api [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 596.800294] env[68674]: value = "task-3239458" [ 596.800294] env[68674]: _type = "Task" [ 596.800294] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.835009] env[68674]: DEBUG oslo_vmware.api [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239458, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.922875] env[68674]: DEBUG nova.network.neutron [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Successfully updated port: 00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.925983] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.990109] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebefdb9-f245-4bf9-8807-a283139cbdcc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.001316] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9312d668-e774-4e87-8a52-eb4c00a6c4dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.044634] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Releasing lock "refresh_cache-503e9328-bbd8-414f-8bea-250ed8247d67" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.044917] env[68674]: DEBUG nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-changed-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 597.045029] env[68674]: DEBUG nova.compute.manager [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing instance network info cache due to event network-changed-4f848177-8140-4862-a7f0-f901b045c157. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 597.045253] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Acquiring lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.045438] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Acquired lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.045825] env[68674]: DEBUG nova.network.neutron [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing network info cache for port 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 597.048886] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedcce2e-34c1-4c1c-97b7-dfeb4f99d1ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.052833] env[68674]: DEBUG nova.network.neutron [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Successfully created port: 926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.066744] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920bfaa2-30e6-49dc-8601-60142dfdf055 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.084632] env[68674]: DEBUG nova.compute.provider_tree [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.133103] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239456, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.135742] env[68674]: DEBUG nova.network.neutron [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.196077] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239457, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.323148] env[68674]: DEBUG oslo_vmware.api [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239458, 'name': PowerOffVM_Task, 'duration_secs': 0.249188} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.323535] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 597.323641] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 597.323938] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a67e6a1-030c-4624-9dee-789febb5ec61 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.346576] env[68674]: DEBUG nova.network.neutron [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.426852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "refresh_cache-60ded0c9-7e20-4071-b5ce-9189d8d01d5c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.426995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "refresh_cache-60ded0c9-7e20-4071-b5ce-9189d8d01d5c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.427672] env[68674]: DEBUG nova.network.neutron [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.443561] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.473722] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 597.474047] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.474365] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.477532] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.477894] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.478013] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 597.478253] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 597.478411] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 597.478591] 
env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 597.478756] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 597.478937] env[68674]: DEBUG nova.virt.hardware [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 597.479887] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df764e5b-8c8e-4ab7-b3d1-475b792855e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.489688] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32ca18c-0c3f-4570-b518-848b5c8325b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.509081] env[68674]: DEBUG nova.network.neutron [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Updating instance_info_cache with network_info: [{"id": "21aba0b8-ff69-4bec-829d-29a8f6941877", "address": "fa:16:3e:30:46:30", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21aba0b8-ff", "ovs_interfaceid": "21aba0b8-ff69-4bec-829d-29a8f6941877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.586522] env[68674]: DEBUG nova.scheduler.client.report [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.632849] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239456, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.696016] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239457, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.809238} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.696481] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 357b515d-ef37-4688-969e-f894be30edb7/357b515d-ef37-4688-969e-f894be30edb7.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.696835] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.697248] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-653a2646-7ecc-40f9-9d3d-684ad6555345 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.707869] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 597.707869] env[68674]: value = "task-3239460" [ 597.707869] env[68674]: _type = "Task" [ 597.707869] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.720549] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239460, 'name': ExtendVirtualDisk_Task} progress is 0%. 
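Annotation: the nova.virt.hardware entries earlier in this stretch (for the 1-vCPU m1.nano flavor, with no limits or preferences set) enumerate candidate CPU topologies and end up with the single candidate sockets=1, cores=1, threads=1. The snippet below is a simplified stand-in for that enumeration, not the actual nova/virt/hardware.py code; the names possible_topologies and pick_topology are illustrative only.

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) splits whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    def pick_topology(vcpus, preferred=VirtCPUTopology(0, 0, 0)):
        """Order candidates so the ones matching the preference come first."""
        def score(t):
            return sum((t.sockets == preferred.sockets,
                        t.cores == preferred.cores,
                        t.threads == preferred.threads))
        return sorted(possible_topologies(vcpus), key=score, reverse=True)

    # For a 1-vCPU flavor with no constraints the only candidate is 1:1:1,
    # matching the "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
    print(pick_topology(1))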
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.852702] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Releasing lock "refresh_cache-a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.853345] env[68674]: DEBUG nova.compute.manager [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 597.853628] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 597.854491] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b71c706-9194-4e52-8e81-f45ecef65371 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.864598] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 597.864905] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78c47e61-79ca-4fe8-a671-c7d5dbf40796 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.874070] env[68674]: DEBUG oslo_vmware.api [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 597.874070] env[68674]: value = "task-3239462" [ 597.874070] env[68674]: _type = "Task" [ 597.874070] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.883980] env[68674]: DEBUG oslo_vmware.api [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239462, 'name': PowerOffVM_Task} progress is 0%. 
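Annotation: the repeated "Task: {'id': task-..., 'name': ...} progress is N%" lines come from oslo.vmware polling vCenter tasks (ExtendVirtualDisk_Task, PowerOffVM_Task, etc.) until they complete. A minimal polling loop of that shape is sketched below; get_task_info is a hypothetical callable standing in for the vCenter task query, and the real oslo.vmware wait_for_task is more involved (loopingcall-based, with richer error handling).

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        """Poll a task until it reaches a terminal state and return its result."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 78}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown error"))
            print("Task %s progress is %s%%" % (task_id, info.get("progress", 0)))
            time.sleep(interval)
        raise TaskFailed("timed out waiting for %s" % task_id)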
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.013041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Releasing lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.013041] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Instance network_info: |[{"id": "21aba0b8-ff69-4bec-829d-29a8f6941877", "address": "fa:16:3e:30:46:30", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21aba0b8-ff", "ovs_interfaceid": "21aba0b8-ff69-4bec-829d-29a8f6941877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 598.013257] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:46:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f1b507ed-cd2d-4c09-9d96-c47bde6a7774', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21aba0b8-ff69-4bec-829d-29a8f6941877', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.027213] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Creating folder: Project (8e05a97545e94e8a9be8f382457d79b2). Parent ref: group-v647377. 
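Annotation: the "Instance VIF info" line above is derived from the neutron network_info entry logged just before it (port id, MAC, br-int bridge, and the nsx-logical-switch-id in the port details). A rough version of that mapping is sketched below; it is a simplification of the vmwareapi driver's translation, which handles more VIF and network types.

    def vif_info_from_network_info(vif, vif_model="vmxnet3"):
        """Map one neutron network_info entry to the driver-side VIF info dict."""
        details = vif.get("details", {})
        return {
            "network_name": vif["network"]["bridge"],            # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        }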
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.028585] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-473f12d5-4eb9-42e5-a241-ab04b708052a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.043236] env[68674]: DEBUG nova.network.neutron [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.048573] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Created folder: Project (8e05a97545e94e8a9be8f382457d79b2) in parent group-v647377. [ 598.049188] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Creating folder: Instances. Parent ref: group-v647426. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.049188] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3a32421-d299-4b9a-b3b1-67ade3ee9518 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.064553] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Created folder: Instances in parent group-v647426. [ 598.065455] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 598.065688] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 598.065911] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7d0a624-6c34-4099-9e80-7fffa3cbbcd1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.097670] env[68674]: DEBUG nova.network.neutron [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Updating instance_info_cache with network_info: [{"id": "5da6718a-7772-42cf-869d-77f84c2984f9", "address": "fa:16:3e:ec:a5:54", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da6718a-77", "ovs_interfaceid": "5da6718a-7772-42cf-869d-77f84c2984f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.099689] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.101017] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.109396] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.106s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.109608] env[68674]: DEBUG nova.objects.instance [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 598.116255] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 598.117330] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 598.117330] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Deleting the datastore file [datastore1] 58830b0e-dbf3-424d-8b9a-bb298b6bea21 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 598.117330] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.117330] env[68674]: value = "task-3239465" [ 598.117330] env[68674]: _type = "Task" [ 598.117330] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.117977] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be230492-8957-4016-92c4-216e4e4525bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.134357] env[68674]: DEBUG oslo_vmware.api [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for the task: (returnval){ [ 598.134357] env[68674]: value = "task-3239466" [ 598.134357] env[68674]: _type = "Task" [ 598.134357] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.147432] env[68674]: DEBUG oslo_vmware.api [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239456, 'name': PowerOnVM_Task, 'duration_secs': 1.213797} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.148412] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239465, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.149836] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 598.149836] env[68674]: INFO nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Took 11.63 seconds to spawn the instance on the hypervisor. [ 598.149836] env[68674]: DEBUG nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 598.150775] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2e7701-7f2b-4f8c-b595-765f146ddabe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.159069] env[68674]: DEBUG oslo_vmware.api [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.222167] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102784} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.222958] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.223769] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9db8025-9691-496e-b50e-4b31b4b1828d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.258618] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 357b515d-ef37-4688-969e-f894be30edb7/357b515d-ef37-4688-969e-f894be30edb7.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.258618] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da26a633-d8d5-4c03-b0d3-8e830c9f922b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.294830] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 598.294830] env[68674]: value = "task-3239467" [ 598.294830] env[68674]: _type = "Task" [ 598.294830] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.308312] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239467, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.595653] env[68674]: DEBUG oslo_vmware.api [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239462, 'name': PowerOffVM_Task, 'duration_secs': 0.1876} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.595653] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 598.595653] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 598.595653] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac89fc9b-fc8a-4d23-864c-ad3d7c463bb7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.609014] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4d80c29-6478-4c73-9494-c7ab19a6c775 tempest-ServerExternalEventsTest-388865042 tempest-ServerExternalEventsTest-388865042-project] Releasing lock "refresh_cache-d1c7a508-7d45-4eff-bb06-b85bfe392772" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.623656] env[68674]: DEBUG nova.compute.utils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.644934] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.644934] env[68674]: DEBUG nova.network.neutron [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.651018] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 598.651646] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 598.651901] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Deleting the datastore file [datastore1] a123c5f2-e775-4dd2-9a5a-35e7d6705dfa {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 598.670083] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0af9634-44d1-42db-a2ce-7c8c46af6d81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.706250] env[68674]: DEBUG oslo_vmware.api [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Task: {'id': task-3239466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269589} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.706250] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239465, 'name': CreateVM_Task, 'duration_secs': 0.485656} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.708982] env[68674]: INFO nova.compute.manager [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Took 24.62 seconds to build instance. 
[ 598.709037] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 598.710411] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 598.710652] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 598.711519] env[68674]: INFO nova.compute.manager [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Took 1.95 seconds to destroy the instance on the hypervisor. [ 598.711873] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 598.712062] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 598.712462] env[68674]: DEBUG oslo_vmware.api [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for the task: (returnval){ [ 598.712462] env[68674]: value = "task-3239469" [ 598.712462] env[68674]: _type = "Task" [ 598.712462] env[68674]: } to complete. 
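Annotation: the teardown of instance 58830b0e above follows the usual order: power off, unregister the VM, delete its datastore directory, then deallocate the network. The sketch below condenses that sequence; the helper objects and method names are placeholders for the driver and network-API calls seen in the log, not real APIs.

    import logging

    LOG = logging.getLogger(__name__)

    def destroy_instance(vm, datastore, network_api, instance):
        try:
            vm.power_off()                              # PowerOffVM_Task
        except Exception:
            LOG.warning("power off failed, continuing with destroy")
        vm.unregister()                                 # UnregisterVM
        datastore.delete_directory(instance.uuid)       # DeleteDatastoreFile_Task
        LOG.info("Instance destroyed")
        # Network cleanup runs last so a failure there does not leave a
        # half-registered VM behind on the hypervisor.
        network_api.deallocate_for_instance(instance)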
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.712948] env[68674]: DEBUG nova.compute.manager [-] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 598.713133] env[68674]: DEBUG nova.network.neutron [-] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 598.718653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.718653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.718653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 598.718653] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-818a2793-235d-479a-a146-d87f63ef825d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.725242] env[68674]: DEBUG nova.network.neutron [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Updating instance_info_cache with network_info: [{"id": "00325c29-c6d8-4e3b-9410-aac7c5b9d5d2", "address": "fa:16:3e:70:76:62", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00325c29-c6", "ovs_interfaceid": "00325c29-c6d8-4e3b-9410-aac7c5b9d5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.734679] env[68674]: DEBUG oslo_vmware.api [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239469, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.739807] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 598.739807] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524870b7-1636-8ce8-4b36-e9fbfc55b5ae" [ 598.739807] env[68674]: _type = "Task" [ 598.739807] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.754050] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524870b7-1636-8ce8-4b36-e9fbfc55b5ae, 'name': SearchDatastore_Task, 'duration_secs': 0.017139} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.755873] env[68674]: DEBUG nova.network.neutron [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updated VIF entry in instance network info cache for port 4f848177-8140-4862-a7f0-f901b045c157. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 598.757406] env[68674]: DEBUG nova.network.neutron [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.758050] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.758427] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.758427] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.759356] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.759356] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 
tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.759454] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d287887-5535-4369-a3cb-37a81f95f79c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.786613] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 598.786817] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 598.791583] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8353733-f3bc-426c-a9a3-d8be5d00418e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.797773] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 598.797773] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528e65bb-dc9d-645e-1f64-33b52332405d" [ 598.797773] env[68674]: _type = "Task" [ 598.797773] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.817298] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239467, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.820259] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528e65bb-dc9d-645e-1f64-33b52332405d, 'name': SearchDatastore_Task, 'duration_secs': 0.016722} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.821486] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-291d9bad-321a-435f-a387-cb7f823ff2fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.833441] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 598.833441] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529ae351-922c-de70-1912-cb748addbcfe" [ 598.833441] env[68674]: _type = "Task" [ 598.833441] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.853620] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529ae351-922c-de70-1912-cb748addbcfe, 'name': SearchDatastore_Task, 'duration_secs': 0.014552} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.853998] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.854545] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 50bb7509-b7e9-4dc3-9746-acd46010cc26/50bb7509-b7e9-4dc3-9746-acd46010cc26.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 598.854628] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb93f07b-6eef-4a6a-866a-c7a9ca9cdef9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.869160] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 598.869160] env[68674]: value = "task-3239470" [ 598.869160] env[68674]: _type = "Task" [ 598.869160] env[68674]: } to complete. 
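Annotation: the lock acquire/release pairs around devstack-image-cache_base above implement a copy-on-miss image cache: serialize on the cached image path, check with SearchDatastore_Task whether the VMDK is already cached, fetch it only if missing, then copy it into the instance directory (CopyVirtualDisk_Task). A simplified single-lock sketch is below; file_exists, fetch_image and copy_virtual_disk are placeholder callables, and the real code also takes an external semaphore and a separate lock on the .vmdk itself.

    from oslo_concurrency import lockutils

    def ensure_image_and_copy(datastore, image_id, instance_uuid,
                              file_exists, fetch_image, copy_virtual_disk):
        cache_path = "devstack-image-cache_base/%s/%s.vmdk" % (image_id, image_id)
        target_path = "%s/%s.vmdk" % (instance_uuid, instance_uuid)
        with lockutils.lock("[%s] devstack-image-cache_base/%s" % (datastore, image_id)):
            if not file_exists(cache_path):          # SearchDatastore_Task
                fetch_image(image_id, cache_path)    # download once, reuse for later builds
        # The copy itself runs outside the cache lock, so concurrent builds
        # from the same cached image can proceed in parallel.
        copy_virtual_disk(cache_path, target_path)   # CopyVirtualDisk_Task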
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.887112] env[68674]: DEBUG nova.policy [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18877733b43148b382441a654e998def', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1107b48f3c0c4b85ad514b915b1c89e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.901863] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.140561] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.154237] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c5b401f-ba8f-40b8-b91e-2a6e0e1f4caa tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.045s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.157823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.073s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.159266] env[68674]: INFO nova.compute.claims [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.215616] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c8467ff1-752d-483b-a60a-5becd65afcd3 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.142s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.227917] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "refresh_cache-60ded0c9-7e20-4071-b5ce-9189d8d01d5c" 
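Annotation: the "Claim successful" line above is checked against the provider inventory reported earlier (VCPU with allocation_ratio 4.0, 512 MB reserved memory, and so on). Placement treats usable capacity as (total - reserved) * allocation_ratio, with max_unit capping a single allocation. The back-of-the-envelope check below illustrates that math; it ignores step_size, min_unit and existing usage, so it is not the real resource-tracker or placement logic.

    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,    "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 120,   "allocation_ratio": 1.0},
    }

    def fits(requested, inventory=INVENTORY, used=None):
        """Return True if the requested resources fit the provider's capacity."""
        used = used or {}
        for rc, amount in requested.items():
            inv = inventory[rc]
            capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
            if amount > inv["max_unit"] or used.get(rc, 0) + amount > capacity:
                return False
        return True

    # m1.nano (1 vCPU, 192 MB RAM, 1 GB root disk) easily fits:
    print(fits({"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}))   # True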
{{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.229557] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Instance network_info: |[{"id": "00325c29-c6d8-4e3b-9410-aac7c5b9d5d2", "address": "fa:16:3e:70:76:62", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00325c29-c6", "ovs_interfaceid": "00325c29-c6d8-4e3b-9410-aac7c5b9d5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 599.229914] env[68674]: DEBUG oslo_vmware.api [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Task: {'id': task-3239469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161532} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.233070] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:76:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00325c29-c6d8-4e3b-9410-aac7c5b9d5d2', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.239775] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Creating folder: Project (c5be31196e1f452e8768b57c105d1765). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 599.240173] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 599.240351] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 599.240534] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 599.241837] env[68674]: INFO nova.compute.manager [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Took 1.39 seconds to destroy the instance on the hypervisor. [ 599.242123] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 599.242711] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f29985a5-4f0f-417f-99f0-436c391d00eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.246695] env[68674]: DEBUG nova.compute.manager [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 599.246794] env[68674]: DEBUG nova.network.neutron [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.260748] env[68674]: DEBUG oslo_concurrency.lockutils [req-af30862c-bdcd-43cd-9bbe-d4e2e4e77e23 req-360ea52a-f2f6-4f09-bd5b-a6954db33c29 service nova] Releasing lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.263038] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Created folder: Project (c5be31196e1f452e8768b57c105d1765) in parent group-v647377. [ 599.267123] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Creating folder: Instances. Parent ref: group-v647429. 
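Annotation: the folder creation above builds the per-project layout in vCenter: a "Project (<project-id>)" folder under the driver's root folder (group-v647377), with an "Instances" folder inside it where the VMs are created. The sketch below shows an idempotent version of that pattern; find_child and create_folder are placeholder callables standing in for the vim Folder.CreateFolder calls, and the real code instead handles a duplicate-name error when the folder already exists.

    def ensure_instance_folder(find_child, create_folder, root_ref, project_id):
        """Return the Instances folder for a project, creating the hierarchy if needed."""
        project_name = "Project (%s)" % project_id
        project_ref = find_child(root_ref, project_name)
        if project_ref is None:                       # reuse the folder if it is already there
            project_ref = create_folder(root_ref, project_name)
        instances_ref = find_child(project_ref, "Instances")
        if instances_ref is None:
            instances_ref = create_folder(project_ref, "Instances")
        return instances_ref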
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 599.267123] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b1f5f94-bc3f-49c5-84d3-8a54a808a7aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.277199] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Created folder: Instances in parent group-v647429. [ 599.279224] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 599.279224] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 599.279224] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-377e388c-ef3c-4926-af79-52df7f29c802 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.308160] env[68674]: DEBUG nova.compute.manager [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Received event network-vif-plugged-00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 599.308349] env[68674]: DEBUG oslo_concurrency.lockutils [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] Acquiring lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.308995] env[68674]: DEBUG oslo_concurrency.lockutils [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.308995] env[68674]: DEBUG oslo_concurrency.lockutils [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.308995] env[68674]: DEBUG nova.compute.manager [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] No waiting events found dispatching network-vif-plugged-00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 599.309365] env[68674]: WARNING nova.compute.manager [req-a56f8fe7-adb9-431d-b830-0e333168216f 
req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Received unexpected event network-vif-plugged-00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 for instance with vm_state building and task_state spawning. [ 599.309365] env[68674]: DEBUG nova.compute.manager [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Received event network-changed-00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 599.309439] env[68674]: DEBUG nova.compute.manager [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Refreshing instance network info cache due to event network-changed-00325c29-c6d8-4e3b-9410-aac7c5b9d5d2. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 599.310079] env[68674]: DEBUG oslo_concurrency.lockutils [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] Acquiring lock "refresh_cache-60ded0c9-7e20-4071-b5ce-9189d8d01d5c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.310079] env[68674]: DEBUG oslo_concurrency.lockutils [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] Acquired lock "refresh_cache-60ded0c9-7e20-4071-b5ce-9189d8d01d5c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.310079] env[68674]: DEBUG nova.network.neutron [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Refreshing network info cache for port 00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.314530] env[68674]: DEBUG nova.network.neutron [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.315536] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.315536] env[68674]: value = "task-3239473" [ 599.315536] env[68674]: _type = "Task" [ 599.315536] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.327340] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239467, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.334813] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239473, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.368028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.368028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.368028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.368028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.368643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.369786] env[68674]: INFO nova.compute.manager [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Terminating instance [ 599.383297] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239470, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.581967] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "d1c7a508-7d45-4eff-bb06-b85bfe392772" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.582269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.582938] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "d1c7a508-7d45-4eff-bb06-b85bfe392772-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.582938] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.582938] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.588198] env[68674]: INFO nova.compute.manager [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Terminating instance [ 599.620826] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc63f2-81e9-78ca-a366-4ecc0197c36c/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 599.621906] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfedb46-e1a4-4d6f-b4f4-cf43d1cf4075 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.631317] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc63f2-81e9-78ca-a366-4ecc0197c36c/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 599.631545] env[68674]: ERROR oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc63f2-81e9-78ca-a366-4ecc0197c36c/disk-0.vmdk due to incomplete transfer. [ 599.631796] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8067cee7-c512-4c49-a977-968b74a76b80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.641865] env[68674]: DEBUG oslo_vmware.rw_handles [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dc63f2-81e9-78ca-a366-4ecc0197c36c/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 599.642408] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Uploaded image 69b64169-5076-4f13-b5fb-4be17d6e4788 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 599.644067] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 599.644356] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-54ac3112-0f69-4250-9330-aa6f90badc00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.658143] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 599.658143] env[68674]: value = "task-3239474" [ 599.658143] env[68674]: _type = "Task" [ 599.658143] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.669768] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239474, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.722327] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 599.816111] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239467, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.832128] env[68674]: DEBUG nova.network.neutron [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.838741] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239473, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.878487] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "refresh_cache-89ccc16e-d0e5-4f7d-985c-8693188e7ed5" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.878726] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquired lock "refresh_cache-89ccc16e-d0e5-4f7d-985c-8693188e7ed5" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.879160] env[68674]: DEBUG nova.network.neutron [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.886509] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.897709] env[68674]: DEBUG nova.compute.manager [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Received event network-changed-21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 599.897905] env[68674]: DEBUG nova.compute.manager [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Refreshing instance network info cache due to event network-changed-21aba0b8-ff69-4bec-829d-29a8f6941877. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 599.898250] env[68674]: DEBUG oslo_concurrency.lockutils [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] Acquiring lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.898341] env[68674]: DEBUG oslo_concurrency.lockutils [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] Acquired lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.899224] env[68674]: DEBUG nova.network.neutron [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Refreshing network info cache for port 21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.938762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.939027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.095481] env[68674]: DEBUG nova.compute.manager [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 600.095481] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.095966] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde97fb9-1c90-4fd9-8716-ed841ee5f2be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.113260] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 600.113489] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1c4b53a-9701-41d4-aa4a-9a1117461a03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.124498] env[68674]: DEBUG oslo_vmware.api [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 600.124498] env[68674]: value = "task-3239476" [ 600.124498] env[68674]: _type = "Task" [ 600.124498] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.145598] env[68674]: DEBUG oslo_vmware.api [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239476, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.146674] env[68674]: DEBUG nova.network.neutron [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Updated VIF entry in instance network info cache for port 00325c29-c6d8-4e3b-9410-aac7c5b9d5d2. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 600.150937] env[68674]: DEBUG nova.network.neutron [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Updating instance_info_cache with network_info: [{"id": "00325c29-c6d8-4e3b-9410-aac7c5b9d5d2", "address": "fa:16:3e:70:76:62", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00325c29-c6", "ovs_interfaceid": "00325c29-c6d8-4e3b-9410-aac7c5b9d5d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.152835] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.179255] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239474, 'name': Destroy_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.204012] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.204501] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.204626] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.204891] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.205112] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.205298] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.207024] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.207024] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 600.207024] env[68674]: DEBUG nova.virt.hardware [None 
req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.207024] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.207024] env[68674]: DEBUG nova.virt.hardware [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.207799] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58fcb95-bb68-4141-b589-b83279f22f96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.237961] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51385f4-70c3-46db-8571-fb10d0ed9d1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.263416] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.319605] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239467, 'name': ReconfigVM_Task, 'duration_secs': 1.540705} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.324967] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 357b515d-ef37-4688-969e-f894be30edb7/357b515d-ef37-4688-969e-f894be30edb7.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.326181] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7987016-41e1-4cde-a1a6-5b32e6870e02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.329761] env[68674]: INFO nova.compute.manager [-] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Took 1.08 seconds to deallocate network for instance. [ 600.336705] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239473, 'name': CreateVM_Task, 'duration_secs': 0.716376} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.343482] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 600.346372] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.346852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.346999] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 600.347433] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 600.347433] env[68674]: value = "task-3239477" [ 600.347433] env[68674]: _type = "Task" [ 600.347433] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.347620] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56e49d80-5239-4e70-9890-96a4ca0fee80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.364186] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 600.364186] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5228a40d-dd27-996e-f5dd-e3e8da106cdd" [ 600.364186] env[68674]: _type = "Task" [ 600.364186] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.365997] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239477, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.389790] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5228a40d-dd27-996e-f5dd-e3e8da106cdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.396648] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239470, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.476801] env[68674]: DEBUG nova.network.neutron [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.638291] env[68674]: DEBUG oslo_vmware.api [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239476, 'name': PowerOffVM_Task, 'duration_secs': 0.367646} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.638291] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 600.638291] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 600.638556] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f050058-1340-46c9-84b2-a6d3a94a212b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.658219] env[68674]: DEBUG oslo_concurrency.lockutils [req-a56f8fe7-adb9-431d-b830-0e333168216f req-7cc45433-e5c6-4938-aa10-80ab90195e65 service nova] Releasing lock "refresh_cache-60ded0c9-7e20-4071-b5ce-9189d8d01d5c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.668595] env[68674]: DEBUG nova.network.neutron [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Successfully created port: 98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.680302] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 
tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239474, 'name': Destroy_Task, 'duration_secs': 0.766577} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.681468] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Destroyed the VM [ 600.681468] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 600.681708] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9d4374b0-c828-45f4-ba42-929408706097 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.689610] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 600.689610] env[68674]: value = "task-3239479" [ 600.689610] env[68674]: _type = "Task" [ 600.689610] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.693579] env[68674]: DEBUG nova.network.neutron [-] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.705323] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239479, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.733158] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 600.733406] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 600.733634] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Deleting the datastore file [datastore1] d1c7a508-7d45-4eff-bb06-b85bfe392772 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 600.733935] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e42e0e4e-f96e-487e-bcd7-01c78070d527 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.741461] env[68674]: DEBUG nova.network.neutron [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Successfully updated port: 926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.751589] env[68674]: DEBUG oslo_vmware.api [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for the task: (returnval){ [ 600.751589] env[68674]: value = "task-3239480" [ 600.751589] env[68674]: _type = "Task" [ 600.751589] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.761191] env[68674]: DEBUG oslo_vmware.api [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239480, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.762274] env[68674]: DEBUG nova.network.neutron [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.837408] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdc5747-90e3-4bec-b4b8-c28cc2d00caa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.847039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.848521] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b17487d-fd55-4631-8ec4-0d68e9dc70e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.891542] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239477, 'name': Rename_Task, 'duration_secs': 0.281454} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.895080] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.902366] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237aff1d-fa07-4988-8de2-dea75ec69303 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.907273] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17d74e45-fda5-4e6b-a296-772ca42d2b3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.917806] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5228a40d-dd27-996e-f5dd-e3e8da106cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.030515} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.918144] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239470, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.61852} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.921741] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.922030] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.922315] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.922482] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.922664] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.922958] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 50bb7509-b7e9-4dc3-9746-acd46010cc26/50bb7509-b7e9-4dc3-9746-acd46010cc26.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.924192] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 600.924192] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 600.924192] env[68674]: value = "task-3239481" [ 600.924192] env[68674]: _type = "Task" [ 600.924192] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.924910] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dda0869-6d53-401d-ad56-d440efb4bb47 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.927051] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1eaca74-c3a1-4f6c-830b-5a33ffefa0fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.930540] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a2f16a-302b-4805-8dcf-0149c7a2bbc9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.949224] env[68674]: DEBUG nova.compute.provider_tree [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.956153] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239481, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.956546] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 600.956546] env[68674]: value = "task-3239482" [ 600.956546] env[68674]: _type = "Task" [ 600.956546] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.958331] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.958484] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.962721] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a911f3a-e731-49c5-ba19-47f24b0d02e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.972394] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 600.972394] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a6ec0d-62ad-5ed7-0c66-2e3352d604de" [ 600.972394] env[68674]: _type = "Task" [ 600.972394] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.980828] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239482, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.992866] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a6ec0d-62ad-5ed7-0c66-2e3352d604de, 'name': SearchDatastore_Task, 'duration_secs': 0.013427} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.993918] env[68674]: DEBUG nova.network.neutron [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Updated VIF entry in instance network info cache for port 21aba0b8-ff69-4bec-829d-29a8f6941877. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 600.994318] env[68674]: DEBUG nova.network.neutron [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Updating instance_info_cache with network_info: [{"id": "21aba0b8-ff69-4bec-829d-29a8f6941877", "address": "fa:16:3e:30:46:30", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21aba0b8-ff", "ovs_interfaceid": "21aba0b8-ff69-4bec-829d-29a8f6941877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.996268] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35df97f4-2b6a-4f7d-b46c-8ed374dac3fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.004491] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 601.004491] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231b291-4f13-979e-46f8-b40f072d89a3" [ 601.004491] env[68674]: _type = "Task" [ 601.004491] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.017325] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231b291-4f13-979e-46f8-b40f072d89a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.200120] env[68674]: INFO nova.compute.manager [-] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Took 2.49 seconds to deallocate network for instance. [ 601.218880] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239479, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.246008] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-8790d635-fec5-4dcf-8cb0-220c2edec971" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.246311] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-8790d635-fec5-4dcf-8cb0-220c2edec971" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.247715] env[68674]: DEBUG nova.network.neutron [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 601.264041] env[68674]: DEBUG oslo_vmware.api [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Task: {'id': task-3239480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260829} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.264234] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.264404] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 601.264627] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.264758] env[68674]: INFO nova.compute.manager [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Took 1.17 seconds to destroy the instance on the hypervisor. [ 601.265406] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 601.265657] env[68674]: DEBUG nova.compute.manager [-] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 601.265758] env[68674]: DEBUG nova.network.neutron [-] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.269352] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Releasing lock "refresh_cache-89ccc16e-d0e5-4f7d-985c-8693188e7ed5" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.269934] env[68674]: DEBUG nova.compute.manager [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 601.270147] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 601.272014] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19469d5-cfca-427d-ac2d-43732956599f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.284089] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 601.284089] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6b5b9b3-9b95-4e99-b09f-ac9f85013f76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.293996] env[68674]: DEBUG oslo_vmware.api [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 601.293996] env[68674]: value = "task-3239483" [ 601.293996] env[68674]: _type = "Task" [ 601.293996] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.306496] env[68674]: DEBUG oslo_vmware.api [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239483, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.448403] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239481, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.458678] env[68674]: DEBUG nova.scheduler.client.report [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 601.472655] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079893} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.473856] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 601.473856] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3005af7-5b17-42e6-be25-3d487ab4811e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.500183] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 50bb7509-b7e9-4dc3-9746-acd46010cc26/50bb7509-b7e9-4dc3-9746-acd46010cc26.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 601.501682] env[68674]: DEBUG oslo_concurrency.lockutils [req-f68745f0-9fa6-4036-9d5d-119bffe4a94e req-f1e8c987-50cc-4741-a56d-7fc667f56ddd service nova] Releasing lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.502063] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64377c88-f61f-4750-874a-f6acc5807ee3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.528417] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 
tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231b291-4f13-979e-46f8-b40f072d89a3, 'name': SearchDatastore_Task, 'duration_secs': 0.016661} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.528690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.529185] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 60ded0c9-7e20-4071-b5ce-9189d8d01d5c/60ded0c9-7e20-4071-b5ce-9189d8d01d5c.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 601.529465] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92e1eb50-6af6-4749-9c40-4cfa45b272b4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.533912] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 601.533912] env[68674]: value = "task-3239484" [ 601.533912] env[68674]: _type = "Task" [ 601.533912] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.542451] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 601.542451] env[68674]: value = "task-3239485" [ 601.542451] env[68674]: _type = "Task" [ 601.542451] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.550713] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239484, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.556474] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239485, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.703670] env[68674]: DEBUG oslo_vmware.api [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239479, 'name': RemoveSnapshot_Task, 'duration_secs': 0.696057} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.704069] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 601.704329] env[68674]: INFO nova.compute.manager [None req-b56d01e6-9e2c-4576-9f74-3dbd6fc44e7d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Took 16.59 seconds to snapshot the instance on the hypervisor. [ 601.714444] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.803653] env[68674]: DEBUG nova.network.neutron [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.814452] env[68674]: DEBUG oslo_vmware.api [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239483, 'name': PowerOffVM_Task, 'duration_secs': 0.150969} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.814452] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 601.814452] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 601.814847] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e830424-3a1d-4c16-9017-c8aa652a19f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.849626] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 601.849857] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 601.850269] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleting the datastore file [datastore1] 89ccc16e-d0e5-4f7d-985c-8693188e7ed5 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.850569] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe1ba2e2-5f57-4b9b-90ec-2cea02e17f99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.867115] env[68674]: DEBUG oslo_vmware.api [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for the task: (returnval){ [ 601.867115] env[68674]: value = "task-3239487" [ 601.867115] env[68674]: _type = "Task" [ 601.867115] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.879917] env[68674]: DEBUG oslo_vmware.api [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239487, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.951511] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239481, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.966237] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.808s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.966237] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 601.971030] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.188s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.973068] env[68674]: INFO nova.compute.claims [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.015455] env[68674]: DEBUG nova.network.neutron [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Updating instance_info_cache with network_info: [{"id": "926b65c9-79eb-4f2d-88ef-f00c20e240f5", "address": "fa:16:3e:9d:db:e2", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926b65c9-79", "ovs_interfaceid": "926b65c9-79eb-4f2d-88ef-f00c20e240f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.048321] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239484, 'name': ReconfigVM_Task, 'duration_secs': 0.30278} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.052233] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 50bb7509-b7e9-4dc3-9746-acd46010cc26/50bb7509-b7e9-4dc3-9746-acd46010cc26.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 602.054485] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bc9a58f-376d-43bf-901f-e9c9a2a031a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.062733] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239485, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.064384] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 602.064384] env[68674]: value = "task-3239489" [ 602.064384] env[68674]: _type = "Task" [ 602.064384] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.081888] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239489, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.342365] env[68674]: DEBUG nova.network.neutron [-] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.384395] env[68674]: DEBUG oslo_vmware.api [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Task: {'id': task-3239487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279439} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.384673] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 602.384856] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 602.385047] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 602.385227] env[68674]: INFO nova.compute.manager [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 602.385475] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 602.385665] env[68674]: DEBUG nova.compute.manager [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 602.386496] env[68674]: DEBUG nova.network.neutron [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 602.420843] env[68674]: DEBUG nova.network.neutron [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.447053] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239481, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.472047] env[68674]: DEBUG nova.compute.utils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 602.473923] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 602.473923] env[68674]: DEBUG nova.network.neutron [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 602.518854] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-8790d635-fec5-4dcf-8cb0-220c2edec971" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.518854] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance network_info: |[{"id": "926b65c9-79eb-4f2d-88ef-f00c20e240f5", "address": "fa:16:3e:9d:db:e2", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926b65c9-79", "ovs_interfaceid": "926b65c9-79eb-4f2d-88ef-f00c20e240f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 602.519935] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:db:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '926b65c9-79eb-4f2d-88ef-f00c20e240f5', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.528954] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating folder: Project (f6b179855b874365964446f95f9f5a53). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.529567] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b347663-29f9-4544-8591-38722a677e49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.545703] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created folder: Project (f6b179855b874365964446f95f9f5a53) in parent group-v647377. [ 602.545970] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating folder: Instances. Parent ref: group-v647432. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.546376] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87e0a979-1aee-46b8-a645-9ee76344d0f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.553468] env[68674]: DEBUG nova.policy [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcf04511df1423e9ec165589627d45b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74387cd2a0404c98929ed093c4f70b62', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 602.558433] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.889572} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.558931] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 60ded0c9-7e20-4071-b5ce-9189d8d01d5c/60ded0c9-7e20-4071-b5ce-9189d8d01d5c.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 602.558931] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 602.559242] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b519e67f-7abc-416f-9093-cdf832b8e182 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.563184] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created folder: Instances in parent group-v647432. [ 602.563184] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 602.563184] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.563533] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33bb03a2-2775-486d-af90-e20f1d640381 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.582458] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 602.582458] env[68674]: value = "task-3239492" [ 602.582458] env[68674]: _type = "Task" [ 602.582458] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.595291] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239489, 'name': Rename_Task, 'duration_secs': 0.221603} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.599338] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.599518] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.599518] env[68674]: value = "task-3239493" [ 602.599518] env[68674]: _type = "Task" [ 602.599518] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.599801] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239492, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.600526] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f296c409-91f3-497f-8c1f-f234e576301d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.609107] env[68674]: DEBUG nova.compute.manager [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Received event network-vif-plugged-926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 602.609317] env[68674]: DEBUG oslo_concurrency.lockutils [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] Acquiring lock "8790d635-fec5-4dcf-8cb0-220c2edec971-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.609516] env[68674]: DEBUG oslo_concurrency.lockutils [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.609691] env[68674]: DEBUG oslo_concurrency.lockutils [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.609837] env[68674]: DEBUG nova.compute.manager [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] No waiting events found dispatching network-vif-plugged-926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 602.610133] env[68674]: WARNING nova.compute.manager [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] 
[instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Received unexpected event network-vif-plugged-926b65c9-79eb-4f2d-88ef-f00c20e240f5 for instance with vm_state building and task_state spawning. [ 602.610223] env[68674]: DEBUG nova.compute.manager [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Received event network-vif-deleted-27bb7642-85fc-46b6-9ac7-8a3a6db3271a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 602.610411] env[68674]: DEBUG nova.compute.manager [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Received event network-changed-926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 602.610568] env[68674]: DEBUG nova.compute.manager [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Refreshing instance network info cache due to event network-changed-926b65c9-79eb-4f2d-88ef-f00c20e240f5. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 602.610766] env[68674]: DEBUG oslo_concurrency.lockutils [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] Acquiring lock "refresh_cache-8790d635-fec5-4dcf-8cb0-220c2edec971" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.610896] env[68674]: DEBUG oslo_concurrency.lockutils [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] Acquired lock "refresh_cache-8790d635-fec5-4dcf-8cb0-220c2edec971" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.611078] env[68674]: DEBUG nova.network.neutron [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Refreshing network info cache for port 926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 602.615140] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 602.615140] env[68674]: value = "task-3239494" [ 602.615140] env[68674]: _type = "Task" [ 602.615140] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.626434] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239493, 'name': CreateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.626838] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239494, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.771960] env[68674]: DEBUG nova.network.neutron [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Successfully updated port: 98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 602.847242] env[68674]: INFO nova.compute.manager [-] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Took 1.58 seconds to deallocate network for instance. [ 602.924967] env[68674]: DEBUG nova.network.neutron [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.950587] env[68674]: DEBUG oslo_vmware.api [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239481, 'name': PowerOnVM_Task, 'duration_secs': 1.910704} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.950587] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.950587] env[68674]: INFO nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Took 13.71 seconds to spawn the instance on the hypervisor. [ 602.950587] env[68674]: DEBUG nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 602.951132] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1afabd2-f54b-4fd4-a659-9a1bad88fe1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.981600] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.101207] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140511} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.101509] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 603.102369] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a01c7a-3962-44f3-bde4-9541c0e9281a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.111706] env[68674]: DEBUG nova.network.neutron [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Successfully created port: 28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.143772] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 60ded0c9-7e20-4071-b5ce-9189d8d01d5c/60ded0c9-7e20-4071-b5ce-9189d8d01d5c.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.145314] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239493, 'name': CreateVM_Task, 'duration_secs': 0.508842} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.149597] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17edb4e1-e5a6-423a-8580-2cd95c9a6600 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.168350] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.171465] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.171758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.172262] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.172412] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.172726] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 603.173804] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5806be91-1104-4925-a1e1-050e14c1985d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.179427] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239494, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.182660] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 603.182660] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52da4c35-21e8-7731-b9ae-1a4082c341b0" [ 603.182660] env[68674]: _type = "Task" [ 603.182660] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.182876] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 603.182876] env[68674]: value = "task-3239495" [ 603.182876] env[68674]: _type = "Task" [ 603.182876] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.203131] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239495, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.203131] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52da4c35-21e8-7731-b9ae-1a4082c341b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.277433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "refresh_cache-f500b495-7bfb-40ff-8a10-e46ca6744902" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.277640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquired lock "refresh_cache-f500b495-7bfb-40ff-8a10-e46ca6744902" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.277719] env[68674]: DEBUG nova.network.neutron [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.355386] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.428276] env[68674]: INFO nova.compute.manager [-] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Took 1.04 seconds to deallocate network for instance. [ 603.440405] env[68674]: DEBUG nova.compute.manager [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Received event network-changed-1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 603.440690] env[68674]: DEBUG nova.compute.manager [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Refreshing instance network info cache due to event network-changed-1b7ed5a9-214f-4011-b73e-63954c02e25e. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 603.440850] env[68674]: DEBUG oslo_concurrency.lockutils [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.440959] env[68674]: DEBUG oslo_concurrency.lockutils [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.444673] env[68674]: DEBUG nova.network.neutron [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Refreshing network info cache for port 1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.480963] env[68674]: INFO nova.compute.manager [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Took 27.22 seconds to build instance. [ 603.540489] env[68674]: DEBUG nova.network.neutron [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Updated VIF entry in instance network info cache for port 926b65c9-79eb-4f2d-88ef-f00c20e240f5. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 603.540489] env[68674]: DEBUG nova.network.neutron [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Updating instance_info_cache with network_info: [{"id": "926b65c9-79eb-4f2d-88ef-f00c20e240f5", "address": "fa:16:3e:9d:db:e2", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap926b65c9-79", "ovs_interfaceid": "926b65c9-79eb-4f2d-88ef-f00c20e240f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.580141] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6216ac6e-a0d3-44f3-ab02-61bfe3f1eb3a {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.589804] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32fb5e7-2afe-46a7-9899-c39eea40d7b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.624202] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49a4635-a7b0-440a-8132-2d838ff55b77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.634026] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d95ef84-0f4f-48b7-9959-6df18b9dec4e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.664103] env[68674]: DEBUG nova.compute.provider_tree [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.665620] env[68674]: DEBUG oslo_vmware.api [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239494, 'name': PowerOnVM_Task, 'duration_secs': 0.863264} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.665902] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.666753] env[68674]: INFO nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Took 11.78 seconds to spawn the instance on the hypervisor. 
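The entries above show the oslo_vmware task-polling pattern that recurs throughout this log: a vCenter task (here PowerOnVM_Task, earlier SearchDatastore_Task and ReconfigVM_Task) is created, polled periodically ("progress is 0%" ... "progress is 99%"), and finally reported as completed with a duration_secs. A minimal sketch of that poll loop, assuming a hypothetical get_task_info() callable in place of the real oslo.vmware session API:

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls, matching the recurring DEBUG lines


class TaskFailed(Exception):
    """Raised when the polled task ends in the error state."""


def wait_for_task(task_ref, get_task_info):
    """Poll a vCenter task until it finishes.

    `task_ref` is the opaque task identifier (e.g. 'task-3239494');
    `get_task_info` is a hypothetical callable returning an object with
    `state` ('running', 'success' or 'error'), `progress` (0-100) and
    `error` attributes -- stand-ins for the real TaskInfo structure.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            duration = time.monotonic() - start
            print(f"Task {task_ref} completed successfully in {duration:.6f}s")
            return info
        if info.state == 'error':
            raise TaskFailed(f"Task {task_ref} failed: {info.error}")
        # Corresponds to the periodic "progress is N%" DEBUG entries above.
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(POLL_INTERVAL)
```

This is only an illustration of the polling behaviour visible in the log; the actual driver delegates the loop to the oslo.vmware session rather than implementing it inline.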
[ 603.667150] env[68674]: DEBUG nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.668254] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50e7075-e83b-4a94-904b-54b6cd434d1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.707394] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "77fa5a89-961b-4c84-a75e-a5be0253677e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.707394] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.707613] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52da4c35-21e8-7731-b9ae-1a4082c341b0, 'name': SearchDatastore_Task, 'duration_secs': 0.030987} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.707664] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239495, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.707917] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.708310] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.708728] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.708728] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.708863] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.709369] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc82840d-349e-4b47-b34a-30c965be5efe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.727444] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.727642] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 603.731018] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f2c36a5-8e19-40f9-bc0c-3c1311fd2671 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.737644] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 603.737644] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527a4550-dad6-8af5-02fb-f16e1dee01c1" [ 603.737644] env[68674]: _type = "Task" [ 603.737644] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.748570] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527a4550-dad6-8af5-02fb-f16e1dee01c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.869018] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "23891bad-1b63-4237-9243-78954cf67d52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.869374] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "23891bad-1b63-4237-9243-78954cf67d52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.883975] env[68674]: DEBUG nova.network.neutron [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.947469] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.983688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5dcf2ded-83b0-4801-807e-feed5fc07bb0 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "357b515d-ef37-4688-969e-f894be30edb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.730s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.002036] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.038590] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.038590] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.038590] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 604.038817] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.038817] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] 
Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.038817] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.038817] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.038817] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.039009] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.039009] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.039009] env[68674]: DEBUG nova.virt.hardware [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.039009] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5156616a-2a97-4177-8fbf-736a111f40a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.043589] env[68674]: DEBUG oslo_concurrency.lockutils [req-240ae8a5-0cf6-48d9-864a-e6b303d3f37c req-2b2f7dd0-1c9e-4167-9ef7-9182567113d2 service nova] Releasing lock "refresh_cache-8790d635-fec5-4dcf-8cb0-220c2edec971" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.050482] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f8cb15-5239-4bfd-b63e-257099b284c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.098752] env[68674]: DEBUG nova.network.neutron [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Updating instance_info_cache with network_info: [{"id": "98baa11f-640b-4afd-98c6-363727b60907", "address": "fa:16:3e:76:97:ba", "network": {"id": "969abe98-9706-4db7-b4aa-0b91db94a2cf", "bridge": "br-int", 
"label": "tempest-ImagesNegativeTestJSON-352238638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "1107b48f3c0c4b85ad514b915b1c89e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98baa11f-64", "ovs_interfaceid": "98baa11f-640b-4afd-98c6-363727b60907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.170038] env[68674]: DEBUG nova.scheduler.client.report [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 604.198452] env[68674]: INFO nova.compute.manager [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Took 26.52 seconds to build instance. [ 604.212194] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239495, 'name': ReconfigVM_Task, 'duration_secs': 0.522874} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.214706] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 60ded0c9-7e20-4071-b5ce-9189d8d01d5c/60ded0c9-7e20-4071-b5ce-9189d8d01d5c.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.215893] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d5bb6b4-2fd7-4595-96af-f5e921b9b43d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.237397] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 604.237397] env[68674]: value = "task-3239496" [ 604.237397] env[68674]: _type = "Task" [ 604.237397] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.250058] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239496, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.254104] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527a4550-dad6-8af5-02fb-f16e1dee01c1, 'name': SearchDatastore_Task, 'duration_secs': 0.030969} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.254891] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ee10785-1e5e-41dc-bbbf-5f375d1e49c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.261208] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 604.261208] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521e200a-6f21-a824-7d61-6b094d2311ee" [ 604.261208] env[68674]: _type = "Task" [ 604.261208] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.275339] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521e200a-6f21-a824-7d61-6b094d2311ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.409505] env[68674]: DEBUG nova.network.neutron [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updated VIF entry in instance network info cache for port 1b7ed5a9-214f-4011-b73e-63954c02e25e. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 604.410071] env[68674]: DEBUG nova.network.neutron [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.489836] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.602443] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Releasing lock "refresh_cache-f500b495-7bfb-40ff-8a10-e46ca6744902" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.602443] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Instance network_info: |[{"id": "98baa11f-640b-4afd-98c6-363727b60907", "address": "fa:16:3e:76:97:ba", "network": {"id": "969abe98-9706-4db7-b4aa-0b91db94a2cf", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-352238638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "1107b48f3c0c4b85ad514b915b1c89e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98baa11f-64", "ovs_interfaceid": "98baa11f-640b-4afd-98c6-363727b60907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 604.602811] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:97:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98baa11f-640b-4afd-98c6-363727b60907', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.614490] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Creating folder: Project (1107b48f3c0c4b85ad514b915b1c89e2). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.614804] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b23613fb-c004-4035-a916-2297955f1002 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.628824] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Created folder: Project (1107b48f3c0c4b85ad514b915b1c89e2) in parent group-v647377. 
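Between "Instance network_info" and "Instance VIF info" above, the driver condenses each Neutron port entry from the network info cache into the small dict that is passed on to the VM builder. A rough sketch of that mapping, based on the two structures printed in the log (NSX-bound OVS port, vmxnet3 adapter); the helper name is illustrative, not the actual Nova function:

```python
def port_to_vif_info(port, vif_model='vmxnet3'):
    """Condense one Neutron network_info entry (as logged above) into the
    VIF info dict handed to the VMware VM builder.

    `port` is a dict shaped like the entries in
    "Updating instance_info_cache with network_info: [...]".
    """
    details = port.get('details', {})
    return {
        'network_name': port['network']['bridge'],   # e.g. 'br-int'
        'mac_address': port['address'],              # e.g. 'fa:16:3e:76:97:ba'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': port['id'],                      # Neutron port UUID
        'vif_model': vif_model,
    }
```

Feeding the f500b495 cache entry above through such a mapping yields exactly the single-element VIF info list that build_virtual_machine logs before the project and instance folders are created.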
[ 604.629142] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Creating folder: Instances. Parent ref: group-v647435. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.629257] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f133503-9392-499c-95ac-a1872b5e8e09 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.642925] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Created folder: Instances in parent group-v647435. [ 604.642925] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.642925] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.642925] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d9726a3-c644-40f7-964b-c6eb99fd569a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.672406] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.672406] env[68674]: value = "task-3239499" [ 604.672406] env[68674]: _type = "Task" [ 604.672406] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.676519] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.677053] env[68674]: DEBUG nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 604.679873] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.058s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.681343] env[68674]: INFO nova.compute.claims [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 604.689990] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239499, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.704668] env[68674]: DEBUG oslo_concurrency.lockutils [None req-275f365f-03ed-43c3-a321-b20610c05a4a tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.040s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.749124] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239496, 'name': Rename_Task, 'duration_secs': 0.196225} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.749991] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 604.750159] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-960f61ad-2db7-4599-bd81-6e27141f87e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.759084] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 604.759084] env[68674]: value = "task-3239500" [ 604.759084] env[68674]: _type = "Task" [ 604.759084] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.785479] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521e200a-6f21-a824-7d61-6b094d2311ee, 'name': SearchDatastore_Task, 'duration_secs': 0.0154} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.785946] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239500, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.786247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.787349] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.787349] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-426ea3fc-6edc-4799-a187-ea353508e972 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.796119] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 604.796119] env[68674]: value = "task-3239501" [ 604.796119] env[68674]: _type = "Task" [ 604.796119] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.809791] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239501, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.913973] env[68674]: DEBUG oslo_concurrency.lockutils [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.914267] env[68674]: DEBUG nova.compute.manager [req-f9caa90f-3455-44db-9bc2-5da54245eda8 req-61f6bb55-986a-4191-92c6-9d91c4892a46 service nova] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Received event network-vif-deleted-5da6718a-7772-42cf-869d-77f84c2984f9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.937775] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.938132] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.938578] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.938578] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.939364] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.940992] env[68674]: INFO nova.compute.manager [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Terminating instance [ 604.967118] env[68674]: DEBUG nova.network.neutron [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Successfully updated port: 
28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.029011] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.181952] env[68674]: DEBUG nova.compute.utils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 605.190384] env[68674]: DEBUG nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.190575] env[68674]: DEBUG nova.network.neutron [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 605.196406] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239499, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.207639] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.278137] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239500, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.288541] env[68674]: DEBUG nova.policy [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21bef802c4cc456986781f0ec9ce00e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5555b26beb384c7680587cfdb67d9a10', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 605.308995] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239501, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.365629] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.365881] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.366100] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.366284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.366449] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.369373] 
env[68674]: INFO nova.compute.manager [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Terminating instance [ 605.446959] env[68674]: DEBUG nova.compute.manager [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 605.447405] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.449349] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7407c448-b9e0-41ba-9d14-e118496e0b80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.462937] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 605.462937] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33a70680-030e-4017-88c3-266fe567729c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.471693] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.471693] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquired lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.471693] env[68674]: DEBUG nova.network.neutron [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.558621] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 605.558945] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Deleting contents of the VM from 
datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 605.559113] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleting the datastore file [datastore2] e84db5bd-b6ec-42ef-9c34-a4160c44d973 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 605.560193] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84bcb0d5-5154-4456-8927-d5a37808f470 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.568445] env[68674]: DEBUG oslo_vmware.api [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 605.568445] env[68674]: value = "task-3239503" [ 605.568445] env[68674]: _type = "Task" [ 605.568445] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.579497] env[68674]: DEBUG oslo_vmware.api [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.691574] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239499, 'name': CreateVM_Task, 'duration_secs': 0.538263} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.692079] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 605.693482] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.694034] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.694518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 605.695326] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3415a8c-7375-4331-b30c-f27b19935884 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.698735] env[68674]: DEBUG nova.compute.manager [None 
req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 605.722830] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 605.722830] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ac4ad-2dd0-dc84-8a0c-7adc568352b3" [ 605.722830] env[68674]: _type = "Task" [ 605.722830] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.740839] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ac4ad-2dd0-dc84-8a0c-7adc568352b3, 'name': SearchDatastore_Task, 'duration_secs': 0.013351} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.742690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.743455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.743692] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.743951] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.748022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.748022] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-09770772-deee-4222-a9b4-e5384965731c 
tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.748022] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5f0639d-c2ba-43c9-9b8e-db5276eac9eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.759812] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.760185] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 605.761136] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d57fba8b-0a50-424e-abf4-f930dcb08831 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.776579] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 605.776579] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52defbc0-df1f-bd7a-a463-f6f935bbb91d" [ 605.776579] env[68674]: _type = "Task" [ 605.776579] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.780634] env[68674]: DEBUG oslo_vmware.api [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239500, 'name': PowerOnVM_Task, 'duration_secs': 0.812183} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.785948] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 605.786209] env[68674]: INFO nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Took 11.12 seconds to spawn the instance on the hypervisor. 
[ 605.786449] env[68674]: DEBUG nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 605.791409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db22002-1b77-4d4c-847a-1c6a3d3d9f39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.801111] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52defbc0-df1f-bd7a-a463-f6f935bbb91d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.824309] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239501, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683314} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.824533] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.824742] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 605.825163] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b77080a-ad01-449b-8f2c-fdb412da5784 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.837989] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 605.837989] env[68674]: value = "task-3239504" [ 605.837989] env[68674]: _type = "Task" [ 605.837989] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.856955] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239504, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.881271] env[68674]: DEBUG nova.compute.manager [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 605.881557] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.882883] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976b7909-88c3-48b7-8417-a7e3e00ebf89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.891645] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 605.891959] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74f12184-39f8-45fe-9bf2-dff07e52fd4b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.901536] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 605.901536] env[68674]: value = "task-3239505" [ 605.901536] env[68674]: _type = "Task" [ 605.901536] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.914130] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239505, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.039216] env[68674]: DEBUG nova.network.neutron [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.080117] env[68674]: DEBUG oslo_vmware.api [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205774} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.080117] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 606.080426] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 606.080426] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.082369] env[68674]: INFO nova.compute.manager [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Took 0.63 seconds to destroy the instance on the hypervisor. [ 606.082369] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 606.082369] env[68674]: DEBUG nova.compute.manager [-] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 606.082369] env[68674]: DEBUG nova.network.neutron [-] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.161968] env[68674]: DEBUG nova.network.neutron [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Successfully created port: 0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.194574] env[68674]: DEBUG nova.compute.manager [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Received event network-vif-plugged-98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 606.194806] env[68674]: DEBUG oslo_concurrency.lockutils [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] Acquiring lock "f500b495-7bfb-40ff-8a10-e46ca6744902-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.195034] env[68674]: DEBUG oslo_concurrency.lockutils [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 
req-9262967c-440a-40ec-9b25-a57178b98033 service nova] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.195392] env[68674]: DEBUG oslo_concurrency.lockutils [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.195392] env[68674]: DEBUG nova.compute.manager [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] No waiting events found dispatching network-vif-plugged-98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 606.195535] env[68674]: WARNING nova.compute.manager [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Received unexpected event network-vif-plugged-98baa11f-640b-4afd-98c6-363727b60907 for instance with vm_state building and task_state spawning. [ 606.195701] env[68674]: DEBUG nova.compute.manager [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Received event network-changed-98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 606.195848] env[68674]: DEBUG nova.compute.manager [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Refreshing instance network info cache due to event network-changed-98baa11f-640b-4afd-98c6-363727b60907. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 606.196206] env[68674]: DEBUG oslo_concurrency.lockutils [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] Acquiring lock "refresh_cache-f500b495-7bfb-40ff-8a10-e46ca6744902" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.196413] env[68674]: DEBUG oslo_concurrency.lockutils [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] Acquired lock "refresh_cache-f500b495-7bfb-40ff-8a10-e46ca6744902" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.196587] env[68674]: DEBUG nova.network.neutron [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Refreshing network info cache for port 98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.238769] env[68674]: DEBUG nova.network.neutron [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Updating instance_info_cache with network_info: [{"id": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "address": "fa:16:3e:41:a3:bb", "network": {"id": "d227e0e9-9502-4493-be37-6a2c081b8bc2", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1059296354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74387cd2a0404c98929ed093c4f70b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28a81236-a1", "ovs_interfaceid": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.276740] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079e2613-e444-4efd-8cc3-bc5f2a9623d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.292969] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2738a9-cd73-41ec-bd2c-04465703086d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.300545] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52defbc0-df1f-bd7a-a463-f6f935bbb91d, 'name': SearchDatastore_Task, 'duration_secs': 
0.024515} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.301776] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f837cf14-ccf1-4551-af8e-bf6c5e22d0d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.338916] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b5bc92-7604-4546-bc63-4e8e8126cffc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.343907] env[68674]: INFO nova.compute.manager [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Took 27.91 seconds to build instance. [ 606.345577] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 606.345577] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5224e7aa-1653-7e37-8965-b34efe3b0423" [ 606.345577] env[68674]: _type = "Task" [ 606.345577] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.359728] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376a5083-6bf0-498c-bdc8-8928617194dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.371753] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187679} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.372624] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5224e7aa-1653-7e37-8965-b34efe3b0423, 'name': SearchDatastore_Task, 'duration_secs': 0.014146} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.373511] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 606.373511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.373746] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f500b495-7bfb-40ff-8a10-e46ca6744902/f500b495-7bfb-40ff-8a10-e46ca6744902.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 606.374619] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5abb88-e1bf-47b2-bd84-556b7069a6b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.377623] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f48d095-1ff9-4101-806b-9be02b445db8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.388915] env[68674]: DEBUG nova.compute.provider_tree [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.415706] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 606.421402] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a66f3f97-6427-4aa0-afc8-743c53138c15 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.437123] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 606.437123] env[68674]: value = "task-3239506" [ 606.437123] env[68674]: _type = "Task" [ 606.437123] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.448543] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 606.448543] env[68674]: value = "task-3239507" [ 606.448543] env[68674]: _type = "Task" [ 606.448543] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.448929] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239505, 'name': PowerOffVM_Task, 'duration_secs': 0.266896} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.449505] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 606.449695] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 606.452838] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b1b3129-622a-4a74-bd4f-79be07900d1a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.457777] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.464319] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239507, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.534702] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 606.534922] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 606.535120] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Deleting the datastore file [datastore1] 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.535407] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47e9764d-8d9a-48ec-aa67-c98bebae319e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.546033] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for the task: (returnval){ [ 606.546033] env[68674]: value = "task-3239509" [ 606.546033] env[68674]: _type = "Task" [ 606.546033] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.554777] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.725048] env[68674]: DEBUG nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 606.755435] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Releasing lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.755871] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Instance network_info: |[{"id": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "address": "fa:16:3e:41:a3:bb", "network": {"id": "d227e0e9-9502-4493-be37-6a2c081b8bc2", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1059296354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74387cd2a0404c98929ed093c4f70b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28a81236-a1", "ovs_interfaceid": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 606.757153] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:a3:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28a81236-a1d4-4c87-86fb-5ad97008d9bb', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.769709] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Creating folder: Project (74387cd2a0404c98929ed093c4f70b62). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.777261] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 606.777261] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.777261] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 606.778038] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.778038] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 606.778038] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 606.778038] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 606.778038] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 606.778343] env[68674]: DEBUG 
nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 606.778343] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 606.778343] env[68674]: DEBUG nova.virt.hardware [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 606.779720] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e2eb458-3fde-43e4-b9b5-32daa4d46bcd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.785712] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9687649b-7141-4e5c-a19c-b7b8e582ab6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.795132] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e1981b-9f97-4941-978f-f3abe02836dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.818257] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Created folder: Project (74387cd2a0404c98929ed093c4f70b62) in parent group-v647377. [ 606.818545] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Creating folder: Instances. Parent ref: group-v647440. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.819995] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-660a8ee0-8ee9-46ea-8ecc-9615cb28bb33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.825167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquiring lock "6278d756-139c-4fcd-bf31-304c978d6682" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.825420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "6278d756-139c-4fcd-bf31-304c978d6682" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.839792] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Created folder: Instances in parent group-v647440. [ 606.840128] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 606.840477] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.841144] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e43aa5c6-4c4e-42de-ab92-0a556a45a004 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.857287] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c188bd16-19db-428f-9aea-1c3961c87d74 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.440s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.868017] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.868017] env[68674]: value = "task-3239515" [ 606.868017] env[68674]: _type = "Task" [ 606.868017] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.878977] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239515, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.892988] env[68674]: DEBUG nova.scheduler.client.report [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 606.947977] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239506, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.961681] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.058789] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.178106] env[68674]: DEBUG nova.network.neutron [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Updated VIF entry in instance network info cache for port 98baa11f-640b-4afd-98c6-363727b60907. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 607.178546] env[68674]: DEBUG nova.network.neutron [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Updating instance_info_cache with network_info: [{"id": "98baa11f-640b-4afd-98c6-363727b60907", "address": "fa:16:3e:76:97:ba", "network": {"id": "969abe98-9706-4db7-b4aa-0b91db94a2cf", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-352238638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "1107b48f3c0c4b85ad514b915b1c89e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98baa11f-64", "ovs_interfaceid": "98baa11f-640b-4afd-98c6-363727b60907", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.232661] env[68674]: DEBUG nova.network.neutron [-] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.338914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "d167585b-11f4-462c-b12e-c6a440c1476a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.339211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "d167585b-11f4-462c-b12e-c6a440c1476a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.360524] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 607.383864] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239515, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.410045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.413469] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 607.416390] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.857s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.416793] env[68674]: DEBUG nova.objects.instance [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 607.448778] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239506, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.784958} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.450335] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f500b495-7bfb-40ff-8a10-e46ca6744902/f500b495-7bfb-40ff-8a10-e46ca6744902.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 607.450923] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 607.452786] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05b6297a-0fae-46cf-ba18-66e2889350ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.465115] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239507, 'name': ReconfigVM_Task, 'duration_secs': 0.698235} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.466417] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.467323] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 607.467323] env[68674]: value = "task-3239516" [ 607.467323] env[68674]: _type = "Task" [ 607.467323] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.467534] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e4249d3-0e5c-4f2e-8deb-21677443d6a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.484721] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239516, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.485260] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 607.485260] env[68674]: value = "task-3239517" [ 607.485260] env[68674]: _type = "Task" [ 607.485260] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.495149] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239517, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.556529] env[68674]: DEBUG oslo_vmware.api [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Task: {'id': task-3239509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.657418} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.556529] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.556529] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 607.556529] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.556807] env[68674]: INFO nova.compute.manager [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Took 1.68 seconds to destroy the instance on the hypervisor. [ 607.556891] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 607.557998] env[68674]: DEBUG nova.compute.manager [-] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 607.557998] env[68674]: DEBUG nova.network.neutron [-] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.681594] env[68674]: DEBUG oslo_concurrency.lockutils [req-3e1462f6-3ac3-403d-86f7-8d8a409c14c5 req-9262967c-440a-40ec-9b25-a57178b98033 service nova] Releasing lock "refresh_cache-f500b495-7bfb-40ff-8a10-e46ca6744902" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.736729] env[68674]: INFO nova.compute.manager [-] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Took 1.66 seconds to deallocate network for instance. [ 607.885444] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239515, 'name': CreateVM_Task, 'duration_secs': 0.723618} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.885716] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 607.886716] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.886716] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.887269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 607.887622] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dc8a855-ef89-428e-a075-d9a40b0bea31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.894060] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.896226] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 
tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 607.896226] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5202ae1f-4f47-4e1e-549f-8be339fca7e6" [ 607.896226] env[68674]: _type = "Task" [ 607.896226] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.905904] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5202ae1f-4f47-4e1e-549f-8be339fca7e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.923012] env[68674]: DEBUG nova.compute.utils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 607.927051] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 607.927419] env[68674]: DEBUG nova.network.neutron [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 607.980672] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140381} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.980947] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 607.981771] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06c18a4-d3db-4c68-8b7b-242781cc7d64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.012577] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] f500b495-7bfb-40ff-8a10-e46ca6744902/f500b495-7bfb-40ff-8a10-e46ca6744902.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 608.014905] env[68674]: DEBUG nova.policy [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'abfd3423bbdb4ee9a94b31fb0f7aa860', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fa7abd14180453bb12e9dd5fc24523f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 608.017164] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db1d87df-3854-478c-85da-bf45ec95f4fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.038830] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239517, 'name': Rename_Task, 'duration_secs': 0.431301} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.039570] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.039860] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3bc8926-919a-4d18-934c-c8abc4b22045 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.046529] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 608.046529] env[68674]: value = "task-3239518" [ 608.046529] env[68674]: _type = "Task" [ 608.046529] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.047974] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 608.047974] env[68674]: value = "task-3239519" [ 608.047974] env[68674]: _type = "Task" [ 608.047974] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.059584] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.062948] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239519, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.134926] env[68674]: DEBUG nova.network.neutron [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Successfully updated port: 0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 608.247427] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.410683] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5202ae1f-4f47-4e1e-549f-8be339fca7e6, 'name': SearchDatastore_Task, 'duration_secs': 0.023283} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.411286] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.413448] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.413448] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.413448] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.413448] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.413448] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2252367-340a-4be2-9864-f9a4ad5f08b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.420795] env[68674]: DEBUG oslo_concurrency.lockutils [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.421474] env[68674]: DEBUG oslo_concurrency.lockutils [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.421474] env[68674]: DEBUG nova.compute.manager [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Checking 
state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.423728] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8d835a-30ad-4776-8e4d-cd9465a19608 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.441022] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 608.442056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5954834-a98a-4dc1-b452-e50c0776aa2f tempest-ServersAdmin275Test-1039536994 tempest-ServersAdmin275Test-1039536994-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.442634] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "0f618d12-dc7b-4739-8ace-9453a7175d75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.442845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.443078] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.443472] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.445431] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.520s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.447049] env[68674]: INFO nova.compute.claims [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.449855] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f34bf429-e570-4a04-b00f-b21b1b0351a1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.462831] env[68674]: DEBUG nova.compute.manager [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 608.463617] env[68674]: DEBUG nova.objects.instance [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lazy-loading 'flavor' on Instance uuid e75d2bc7-f356-4443-9641-d9ebf35843cd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 608.469524] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 608.469524] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c9505b-fd58-e6c2-8a0c-7bf12af016a6" [ 608.469524] env[68674]: _type = "Task" [ 608.469524] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.479694] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c9505b-fd58-e6c2-8a0c-7bf12af016a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.523868] env[68674]: DEBUG nova.network.neutron [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Successfully created port: b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.571473] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239518, 'name': ReconfigVM_Task, 'duration_secs': 0.501194} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.571796] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239519, 'name': PowerOnVM_Task} progress is 81%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.572074] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Reconfigured VM instance instance-00000013 to attach disk [datastore1] f500b495-7bfb-40ff-8a10-e46ca6744902/f500b495-7bfb-40ff-8a10-e46ca6744902.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 608.572702] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82be624b-0972-4c01-b655-c4e49b755330 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.581443] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 608.581443] env[68674]: value = "task-3239520" [ 608.581443] env[68674]: _type = "Task" [ 608.581443] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.595280] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239520, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.639163] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.639163] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.639163] env[68674]: DEBUG nova.network.neutron [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.782122] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Received event network-vif-plugged-28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.782503] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Acquiring lock "ae945f3f-fde8-4b25-a5bd-81014fc99690-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.782881] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.784202] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.784202] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] No waiting events found dispatching network-vif-plugged-28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 608.784202] env[68674]: WARNING nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Received unexpected event network-vif-plugged-28a81236-a1d4-4c87-86fb-5ad97008d9bb for instance with vm_state building and task_state spawning. 
[ 608.784429] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Received event network-changed-28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.785780] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Refreshing instance network info cache due to event network-changed-28a81236-a1d4-4c87-86fb-5ad97008d9bb. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 608.785780] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Acquiring lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.785780] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Acquired lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.785780] env[68674]: DEBUG nova.network.neutron [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Refreshing network info cache for port 28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.844373] env[68674]: DEBUG nova.network.neutron [-] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.954534] env[68674]: DEBUG nova.compute.manager [req-5173847a-36da-4f85-bfbd-f6faba6dcaee req-31933d4b-7491-4480-b562-00134b0a4775 service nova] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Received event network-vif-deleted-2bfad8ae-7c3e-4d21-9729-4f11937e5698 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.988762] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c9505b-fd58-e6c2-8a0c-7bf12af016a6, 'name': SearchDatastore_Task, 'duration_secs': 0.015451} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.989310] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f552525-87af-4401-a6bb-2d92bb4577e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.997822] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 608.997822] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527eb086-81fd-d7b0-9a57-d55c2a0cd676" [ 608.997822] env[68674]: _type = "Task" [ 608.997822] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.012896] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527eb086-81fd-d7b0-9a57-d55c2a0cd676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.065053] env[68674]: DEBUG oslo_vmware.api [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239519, 'name': PowerOnVM_Task, 'duration_secs': 0.968052} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.065184] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 609.065379] env[68674]: INFO nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Took 11.62 seconds to spawn the instance on the hypervisor. [ 609.065664] env[68674]: DEBUG nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.066753] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4df624-3f5b-47aa-ae10-88d93aee89de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.094715] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239520, 'name': Rename_Task, 'duration_secs': 0.357145} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.095023] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 609.095281] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aa6737b-fc17-4de3-a6fc-d627b0c38a7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.107241] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 609.107241] env[68674]: value = "task-3239521" [ 609.107241] env[68674]: _type = "Task" [ 609.107241] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.116770] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239521, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.235382] env[68674]: DEBUG nova.network.neutron [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.346641] env[68674]: INFO nova.compute.manager [-] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Took 1.79 seconds to deallocate network for instance. [ 609.452451] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 609.481036] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 609.484654] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.484962] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.485244] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 609.485440] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.485588] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.485808] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.485979] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.486199] env[68674]: DEBUG 
nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.486329] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.486494] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.486660] env[68674]: DEBUG nova.virt.hardware [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.486950] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3b8ca4a-beaf-49aa-b476-4e646aabe92c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.489834] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4df8cce-f5e6-4131-9a73-ea8689324605 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.504145] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f990e2-ed02-49ea-a78e-717e0a6561ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.512392] env[68674]: DEBUG oslo_vmware.api [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 609.512392] env[68674]: value = "task-3239522" [ 609.512392] env[68674]: _type = "Task" [ 609.512392] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.530800] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527eb086-81fd-d7b0-9a57-d55c2a0cd676, 'name': SearchDatastore_Task, 'duration_secs': 0.017177} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.534444] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.534775] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] ae945f3f-fde8-4b25-a5bd-81014fc99690/ae945f3f-fde8-4b25-a5bd-81014fc99690.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.535055] env[68674]: DEBUG oslo_vmware.api [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239522, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.538255] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee88c7bc-9851-4088-a0b6-f3cd5208e65c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.557046] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 609.557046] env[68674]: value = "task-3239523" [ 609.557046] env[68674]: _type = "Task" [ 609.557046] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.584831] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239523, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.596046] env[68674]: INFO nova.compute.manager [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Took 28.27 seconds to build instance. [ 609.630246] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239521, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.729680] env[68674]: DEBUG nova.network.neutron [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updating instance_info_cache with network_info: [{"id": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "address": "fa:16:3e:01:82:11", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ef8fcab-d9", "ovs_interfaceid": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.819717] env[68674]: DEBUG nova.network.neutron [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Updated VIF entry in instance network info cache for port 28a81236-a1d4-4c87-86fb-5ad97008d9bb. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 609.820333] env[68674]: DEBUG nova.network.neutron [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Updating instance_info_cache with network_info: [{"id": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "address": "fa:16:3e:41:a3:bb", "network": {"id": "d227e0e9-9502-4493-be37-6a2c081b8bc2", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1059296354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74387cd2a0404c98929ed093c4f70b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28a81236-a1", "ovs_interfaceid": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.858340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.032095] env[68674]: DEBUG oslo_vmware.api [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239522, 'name': PowerOffVM_Task, 'duration_secs': 0.378461} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.033777] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 610.034341] env[68674]: DEBUG nova.compute.manager [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 610.034956] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f36052-22c3-4b07-91dd-cdc5681053a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.079234] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239523, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.102617] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d7bba78-83e7-4fc7-bdb9-40fc4ec92c03 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.945s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.125433] env[68674]: DEBUG oslo_vmware.api [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239521, 'name': PowerOnVM_Task, 'duration_secs': 0.738649} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.130509] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 610.130509] env[68674]: INFO nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Took 9.98 seconds to spawn the instance on the hypervisor. 
[ 610.130509] env[68674]: DEBUG nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 610.131570] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ef4fa0-6a0e-4451-9f89-1a86919301dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.233721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Releasing lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.233721] env[68674]: DEBUG nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Instance network_info: |[{"id": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "address": "fa:16:3e:01:82:11", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ef8fcab-d9", "ovs_interfaceid": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 610.233985] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:82:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ef8fcab-d9ac-4837-bdf3-84d597114e1d', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.241419] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Creating folder: Project (5555b26beb384c7680587cfdb67d9a10). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.241680] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b3f0956-400d-4949-9136-f209c61ad2ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.260803] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c9823c-06f1-4275-9f8c-c78053c11c62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.265988] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Created folder: Project (5555b26beb384c7680587cfdb67d9a10) in parent group-v647377. [ 610.266256] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Creating folder: Instances. Parent ref: group-v647444. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.266934] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04ba3ae9-f235-4614-a4ee-8b74a0622dbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.272444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7b784c-b0e4-4a2d-845c-0f76cca85b2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.280511] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Created folder: Instances in parent group-v647444. [ 610.280773] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 610.312846] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.312995] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c13f1445-8878-4035-936a-205ae8f60569 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.330779] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef830533-6ed0-4825-b10c-d9a350a372b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.332754] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Releasing lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.333026] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Received event network-vif-deleted-3c8004db-9f93-48c8-9861-4f8e8f1cd2c9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 610.333221] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Received event network-vif-plugged-0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 610.333763] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.333763] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.333763] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.334326] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] No waiting events found dispatching network-vif-plugged-0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 610.334326] env[68674]: WARNING nova.compute.manager 
[req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Received unexpected event network-vif-plugged-0ef8fcab-d9ac-4837-bdf3-84d597114e1d for instance with vm_state building and task_state spawning. [ 610.334326] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Received event network-changed-0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 610.334743] env[68674]: DEBUG nova.compute.manager [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Refreshing instance network info cache due to event network-changed-0ef8fcab-d9ac-4837-bdf3-84d597114e1d. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 610.334743] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Acquiring lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.334743] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Acquired lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.335035] env[68674]: DEBUG nova.network.neutron [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Refreshing network info cache for port 0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.350571] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17b5210-c3a6-4fdb-89d5-143cbd5980a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.354433] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.354433] env[68674]: value = "task-3239527" [ 610.354433] env[68674]: _type = "Task" [ 610.354433] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.369515] env[68674]: DEBUG nova.compute.provider_tree [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.377674] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239527, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.445306] env[68674]: DEBUG nova.network.neutron [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Successfully updated port: b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 610.560941] env[68674]: DEBUG oslo_concurrency.lockutils [None req-98605eeb-610d-452b-a13b-cc5931655fd8 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.140s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.577368] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239523, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.754009} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.577956] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] ae945f3f-fde8-4b25-a5bd-81014fc99690/ae945f3f-fde8-4b25-a5bd-81014fc99690.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.579063] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.579063] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59a9a28f-edd8-48de-8811-f03f81f200b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.590025] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 610.590025] env[68674]: value = "task-3239528" [ 610.590025] env[68674]: _type = "Task" [ 610.590025] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.609118] env[68674]: DEBUG nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.614020] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239528, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.655979] env[68674]: INFO nova.compute.manager [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Took 28.35 seconds to build instance. [ 610.868414] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239527, 'name': CreateVM_Task, 'duration_secs': 0.444977} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.868414] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 610.868821] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.868993] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.869318] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 610.869555] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af7aa0ba-4cf0-47eb-90e2-250accfe1856 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.872819] env[68674]: DEBUG nova.scheduler.client.report [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.881783] env[68674]: DEBUG oslo_vmware.api [None 
req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 610.881783] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520aba84-c12e-26eb-ef51-b3f0f4c54f45" [ 610.881783] env[68674]: _type = "Task" [ 610.881783] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.892404] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520aba84-c12e-26eb-ef51-b3f0f4c54f45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.948091] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "refresh_cache-3463e09e-dc2f-432c-9eff-8192c2616240" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.948091] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "refresh_cache-3463e09e-dc2f-432c-9eff-8192c2616240" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.948091] env[68674]: DEBUG nova.network.neutron [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.103190] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239528, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146972} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.107402] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.108477] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f4075c-0816-4e78-988c-73ca160d7382 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.139320] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] ae945f3f-fde8-4b25-a5bd-81014fc99690/ae945f3f-fde8-4b25-a5bd-81014fc99690.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.145993] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a49e378-3c4c-4350-adce-c1e21e8bddb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.164845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-09770772-deee-4222-a9b4-e5384965731c tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.098s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.176092] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 611.176092] env[68674]: value = "task-3239529" [ 611.176092] env[68674]: _type = "Task" [ 611.176092] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.181269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.189064] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239529, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.217292] env[68674]: DEBUG nova.network.neutron [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updated VIF entry in instance network info cache for port 0ef8fcab-d9ac-4837-bdf3-84d597114e1d. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.217676] env[68674]: DEBUG nova.network.neutron [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updating instance_info_cache with network_info: [{"id": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "address": "fa:16:3e:01:82:11", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ef8fcab-d9", "ovs_interfaceid": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.379568] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.933s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.379568] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 611.383216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.121s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.384101] env[68674]: INFO nova.compute.claims [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.407369] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520aba84-c12e-26eb-ef51-b3f0f4c54f45, 'name': SearchDatastore_Task, 'duration_secs': 0.036718} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.407939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.408158] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.408565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.408793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.408965] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 611.409275] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0823177-b7a6-4fdc-9578-57f399156f69 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.420585] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 611.420852] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 611.421629] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64ef264d-71d7-46f8-aa12-a547cbca9892 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.429614] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 611.429614] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed2373-b672-1751-55a6-0184bd92d182" [ 611.429614] env[68674]: _type = "Task" [ 611.429614] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.441636] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed2373-b672-1751-55a6-0184bd92d182, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.498966] env[68674]: DEBUG nova.network.neutron [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.666800] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 611.691494] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239529, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.698928] env[68674]: DEBUG nova.compute.manager [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Received event network-vif-plugged-b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.699239] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] Acquiring lock "3463e09e-dc2f-432c-9eff-8192c2616240-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.699769] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] Lock "3463e09e-dc2f-432c-9eff-8192c2616240-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.699769] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] Lock "3463e09e-dc2f-432c-9eff-8192c2616240-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.699769] env[68674]: DEBUG nova.compute.manager [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] No waiting events found dispatching network-vif-plugged-b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 611.699945] env[68674]: WARNING nova.compute.manager [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Received unexpected event network-vif-plugged-b8d668c5-2f06-454e-842e-fdbe52dffa5e for instance with vm_state building and task_state spawning. [ 611.701625] env[68674]: DEBUG nova.compute.manager [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Received event network-changed-b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.704157] env[68674]: DEBUG nova.compute.manager [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Refreshing instance network info cache due to event network-changed-b8d668c5-2f06-454e-842e-fdbe52dffa5e. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 611.704157] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] Acquiring lock "refresh_cache-3463e09e-dc2f-432c-9eff-8192c2616240" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.724848] env[68674]: DEBUG oslo_concurrency.lockutils [req-f4e8400f-a86f-4196-9db7-90727e41033f req-45131e75-7e3f-4f72-9e55-ed755a8e89a9 service nova] Releasing lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.799229] env[68674]: DEBUG nova.network.neutron [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Updating instance_info_cache with network_info: [{"id": "b8d668c5-2f06-454e-842e-fdbe52dffa5e", "address": "fa:16:3e:f0:59:2d", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d668c5-2f", "ovs_interfaceid": "b8d668c5-2f06-454e-842e-fdbe52dffa5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.890667] env[68674]: DEBUG nova.compute.utils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 611.898596] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 611.898596] env[68674]: DEBUG nova.network.neutron [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.900445] env[68674]: DEBUG nova.compute.manager [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Received event network-changed-21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.900634] env[68674]: DEBUG nova.compute.manager [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Refreshing instance network info cache due to event network-changed-21aba0b8-ff69-4bec-829d-29a8f6941877. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 611.900843] env[68674]: DEBUG oslo_concurrency.lockutils [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] Acquiring lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.900984] env[68674]: DEBUG oslo_concurrency.lockutils [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] Acquired lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.901152] env[68674]: DEBUG nova.network.neutron [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Refreshing network info cache for port 21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.941800] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed2373-b672-1751-55a6-0184bd92d182, 'name': SearchDatastore_Task, 'duration_secs': 0.029925} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.942374] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a270f6-8f99-4266-9671-b44a2436d1bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.948814] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 611.948814] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52962635-67fa-5b67-fd59-528efe83c03b" [ 611.948814] env[68674]: _type = "Task" [ 611.948814] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.957941] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52962635-67fa-5b67-fd59-528efe83c03b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.148533] env[68674]: DEBUG nova.policy [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8942b53e01ca49c38b9c8be52bfa02fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d1b163422ef4e798ffc2ef3c5c7d2ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 612.194214] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239529, 'name': ReconfigVM_Task, 'duration_secs': 0.741783} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.194214] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Reconfigured VM instance instance-00000014 to attach disk [datastore1] ae945f3f-fde8-4b25-a5bd-81014fc99690/ae945f3f-fde8-4b25-a5bd-81014fc99690.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.194214] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12687196-b1c8-45cf-bf1f-1d1fc471d962 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.196433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.205309] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 612.205309] env[68674]: value = "task-3239531" [ 612.205309] env[68674]: _type = "Task" [ 612.205309] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.216755] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239531, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.300555] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "refresh_cache-3463e09e-dc2f-432c-9eff-8192c2616240" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.300895] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Instance network_info: |[{"id": "b8d668c5-2f06-454e-842e-fdbe52dffa5e", "address": "fa:16:3e:f0:59:2d", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d668c5-2f", "ovs_interfaceid": "b8d668c5-2f06-454e-842e-fdbe52dffa5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 612.302217] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] Acquired lock "refresh_cache-3463e09e-dc2f-432c-9eff-8192c2616240" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.302217] env[68674]: DEBUG nova.network.neutron [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Refreshing network info cache for port b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.302842] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:59:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'b8d668c5-2f06-454e-842e-fdbe52dffa5e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 612.312654] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 612.315271] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 612.315271] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32d33e2b-d1e1-4ce6-bef6-07e065db42e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.340334] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 612.340334] env[68674]: value = "task-3239532" [ 612.340334] env[68674]: _type = "Task" [ 612.340334] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.353782] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239532, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.364912] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.365188] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.396823] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 612.466590] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52962635-67fa-5b67-fd59-528efe83c03b, 'name': SearchDatastore_Task, 'duration_secs': 0.032627} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.466923] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.467209] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] fa89e0b5-590d-43fb-bb11-02f8fdee0c2f/fa89e0b5-590d-43fb-bb11-02f8fdee0c2f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 612.467496] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b81fa731-f102-4b14-bbd1-346a4ac36545 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.481029] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 612.481029] env[68674]: value = "task-3239533" [ 612.481029] env[68674]: _type = "Task" [ 612.481029] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.488893] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239533, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.668850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "f500b495-7bfb-40ff-8a10-e46ca6744902" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.669250] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.669333] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "f500b495-7bfb-40ff-8a10-e46ca6744902-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.669528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.669691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.671820] env[68674]: INFO nova.compute.manager [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Terminating instance [ 612.716309] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239531, 'name': Rename_Task, 'duration_secs': 0.222054} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.716677] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.716949] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-093cc36d-f985-44df-b908-ab448d2c22e6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.727679] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 612.727679] env[68674]: value = "task-3239534" [ 612.727679] env[68674]: _type = "Task" [ 612.727679] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.740800] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.790856] env[68674]: DEBUG nova.objects.instance [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lazy-loading 'flavor' on Instance uuid e75d2bc7-f356-4443-9641-d9ebf35843cd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 612.857134] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239532, 'name': CreateVM_Task, 'duration_secs': 0.474024} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.857326] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 612.858104] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.858667] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.859036] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 612.859421] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f87658ae-5bcd-47fd-bbe9-7cdc3f38fd68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.870429] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 612.870429] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5205a109-c25a-fd0f-f438-60d3e6d51762" [ 612.870429] env[68674]: _type = "Task" [ 612.870429] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.881139] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5205a109-c25a-fd0f-f438-60d3e6d51762, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.994519] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239533, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.062453] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab4bd7e-77ac-459b-8172-7e7400018ba9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.074015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c6a080-0620-43c2-b06b-1f5f86d8477c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.080751] env[68674]: DEBUG nova.network.neutron [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Updated VIF entry in instance network info cache for port 21aba0b8-ff69-4bec-829d-29a8f6941877. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 613.081289] env[68674]: DEBUG nova.network.neutron [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Updating instance_info_cache with network_info: [{"id": "21aba0b8-ff69-4bec-829d-29a8f6941877", "address": "fa:16:3e:30:46:30", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21aba0b8-ff", "ovs_interfaceid": "21aba0b8-ff69-4bec-829d-29a8f6941877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.120873] env[68674]: DEBUG oslo_concurrency.lockutils [req-646d7922-e1d9-4555-b227-14d9b9c7cbf8 req-816c3629-eff4-42eb-a63b-d128297b2db7 service nova] Releasing lock "refresh_cache-50bb7509-b7e9-4dc3-9746-acd46010cc26" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.121862] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6427c831-a4b6-489f-b0fa-a5c55b0f7037 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.132409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f47e41-48d7-4544-8398-b40519f3d35b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.155083] env[68674]: DEBUG 
nova.compute.provider_tree [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.176059] env[68674]: DEBUG nova.compute.manager [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.176312] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.177212] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d956bc9f-ded3-47c7-bf30-13f1b75b7cb7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.192794] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.193253] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-544189ee-df65-438a-b4e3-9eb54704e706 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.196510] env[68674]: INFO nova.compute.manager [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Rebuilding instance [ 613.200343] env[68674]: DEBUG nova.network.neutron [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Successfully created port: 07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 613.208735] env[68674]: DEBUG oslo_vmware.api [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 613.208735] env[68674]: value = "task-3239535" [ 613.208735] env[68674]: _type = "Task" [ 613.208735] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.223858] env[68674]: DEBUG oslo_vmware.api [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239535, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.244724] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239534, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.252904] env[68674]: DEBUG nova.compute.manager [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.253770] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c86d8e-279d-4e5d-96c2-57a4be520b93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.301762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.301986] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquired lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.302187] env[68674]: DEBUG nova.network.neutron [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.302367] env[68674]: DEBUG nova.objects.instance [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lazy-loading 'info_cache' on Instance uuid e75d2bc7-f356-4443-9641-d9ebf35843cd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 613.381750] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5205a109-c25a-fd0f-f438-60d3e6d51762, 'name': SearchDatastore_Task, 'duration_secs': 0.059929} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.382102] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.382394] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 613.382655] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.382804] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.383480] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.383480] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7af04f3b-0867-4667-b6d8-fbc8f455dff4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.397420] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.397624] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 613.398505] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9a559de-9e18-4ec9-9c58-158a1c5c6aac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.404809] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 613.404809] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5256bf87-d039-3848-fba0-ec9de4890035" [ 613.404809] env[68674]: _type = "Task" [ 613.404809] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.412310] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 613.417664] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5256bf87-d039-3848-fba0-ec9de4890035, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.418507] env[68674]: DEBUG nova.network.neutron [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Updated VIF entry in instance network info cache for port b8d668c5-2f06-454e-842e-fdbe52dffa5e. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 613.418830] env[68674]: DEBUG nova.network.neutron [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Updating instance_info_cache with network_info: [{"id": "b8d668c5-2f06-454e-842e-fdbe52dffa5e", "address": "fa:16:3e:f0:59:2d", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d668c5-2f", "ovs_interfaceid": "b8d668c5-2f06-454e-842e-fdbe52dffa5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.445930] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 613.446186] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.446346] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 613.446593] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] 
Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.446698] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 613.446793] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 613.447129] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 613.447225] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 613.447320] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 613.447578] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 613.447681] env[68674]: DEBUG nova.virt.hardware [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 613.448863] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8527aba-30d6-4730-b054-9850799f584a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.459167] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc1fc33-50bc-497d-a6bf-b7067fe2d1dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.490759] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565554} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.491074] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] fa89e0b5-590d-43fb-bb11-02f8fdee0c2f/fa89e0b5-590d-43fb-bb11-02f8fdee0c2f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 613.491305] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 613.491562] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e55ec5bd-d159-4936-be4b-7f8ba83d046d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.500334] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 613.500334] env[68674]: value = "task-3239536" [ 613.500334] env[68674]: _type = "Task" [ 613.500334] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.509452] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.659359] env[68674]: DEBUG nova.scheduler.client.report [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 613.719033] env[68674]: DEBUG oslo_vmware.api [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239535, 'name': PowerOffVM_Task, 'duration_secs': 0.305484} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.719320] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 613.719468] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.719714] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08b54153-589c-4647-9433-8d322b21c418 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.743187] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239534, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.808090] env[68674]: DEBUG nova.objects.base [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 613.831695] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.831695] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.831695] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Deleting the datastore file [datastore1] f500b495-7bfb-40ff-8a10-e46ca6744902 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.831695] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2342baa-645f-4ae2-acdc-20b980eda9b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.842476] env[68674]: DEBUG oslo_vmware.api [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for the task: (returnval){ [ 613.842476] env[68674]: value = "task-3239538" [ 613.842476] env[68674]: _type = "Task" [ 613.842476] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.851723] env[68674]: DEBUG oslo_vmware.api [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.916440] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5256bf87-d039-3848-fba0-ec9de4890035, 'name': SearchDatastore_Task, 'duration_secs': 0.035355} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.917277] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9620bfb9-8329-4abe-9f30-1f4ff0479069 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.923975] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 613.923975] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52394854-6e36-a52e-9a93-a83073068831" [ 613.923975] env[68674]: _type = "Task" [ 613.923975] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.925123] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ca9870e-0534-45fb-a1c4-b75d82616161 req-4f50f7e8-c9cc-4bde-9396-7bddaa554337 service nova] Releasing lock "refresh_cache-3463e09e-dc2f-432c-9eff-8192c2616240" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.939190] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52394854-6e36-a52e-9a93-a83073068831, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.013901] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0796} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.014199] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 614.015041] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc224d24-d86b-4256-b8bb-9cbf83bcb4a1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.039152] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] fa89e0b5-590d-43fb-bb11-02f8fdee0c2f/fa89e0b5-590d-43fb-bb11-02f8fdee0c2f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 614.041340] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bc796ec-7fa7-4a5d-a81e-0869aefd6e0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.062030] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 614.062030] env[68674]: value = "task-3239540" [ 614.062030] env[68674]: _type = "Task" [ 614.062030] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.070587] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.169455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.170013] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 614.172704] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.326s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.172938] env[68674]: DEBUG nova.objects.instance [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lazy-loading 'resources' on Instance uuid a123c5f2-e775-4dd2-9a5a-35e7d6705dfa {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 614.245512] env[68674]: DEBUG oslo_vmware.api [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239534, 'name': PowerOnVM_Task, 'duration_secs': 1.257187} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.245777] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.245976] env[68674]: INFO nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Took 10.24 seconds to spawn the instance on the hypervisor. [ 614.246191] env[68674]: DEBUG nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.246968] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc3d94e-8e2d-421c-be22-bc5daed0f16e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.270809] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 614.270809] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-382f235d-c1a5-478c-b586-fa84871d25b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.279867] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 614.279867] env[68674]: value = "task-3239541" [ 614.279867] env[68674]: _type = "Task" [ 614.279867] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.292239] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.355889] env[68674]: DEBUG oslo_vmware.api [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Task: {'id': task-3239538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25318} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.357065] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.357065] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.357065] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.357065] env[68674]: INFO nova.compute.manager [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Took 1.18 seconds to destroy the instance on the hypervisor. [ 614.357065] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.357371] env[68674]: DEBUG nova.compute.manager [-] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.357371] env[68674]: DEBUG nova.network.neutron [-] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.442621] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52394854-6e36-a52e-9a93-a83073068831, 'name': SearchDatastore_Task, 'duration_secs': 0.016048} completed successfully.
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.443376] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.443703] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 3463e09e-dc2f-432c-9eff-8192c2616240/3463e09e-dc2f-432c-9eff-8192c2616240.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.443997] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-904da964-b381-4e2e-a91b-fcc76513166e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.452277] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 614.452277] env[68674]: value = "task-3239542" [ 614.452277] env[68674]: _type = "Task" [ 614.452277] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.462993] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.572687] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239540, 'name': ReconfigVM_Task, 'duration_secs': 0.329522} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.573047] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Reconfigured VM instance instance-00000015 to attach disk [datastore1] fa89e0b5-590d-43fb-bb11-02f8fdee0c2f/fa89e0b5-590d-43fb-bb11-02f8fdee0c2f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.573688] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-300f9116-ffc9-4337-8bab-9f7a23b92ca1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.581836] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 614.581836] env[68674]: value = "task-3239543" [ 614.581836] env[68674]: _type = "Task" [ 614.581836] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.596699] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239543, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.678860] env[68674]: DEBUG nova.compute.utils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 614.685475] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 614.685475] env[68674]: DEBUG nova.network.neutron [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 614.685840] env[68674]: DEBUG nova.network.neutron [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Updating instance_info_cache with network_info: [{"id": "81afa256-db6b-44e2-944a-7654579b8b50", "address": "fa:16:3e:11:0a:6e", "network": {"id": "51117274-b276-4648-9ae8-13d7da21bce9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1367196897-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be6579f30b2c418c98a5a373176baf05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81afa256-db", "ovs_interfaceid": "81afa256-db6b-44e2-944a-7654579b8b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.774030] env[68674]: INFO nova.compute.manager [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Took 26.72 seconds to build instance. [ 614.798325] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239541, 'name': PowerOffVM_Task, 'duration_secs': 0.290464} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.799694] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.799694] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.800589] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fbe788-56d5-4e87-90fa-9b2fdbcd8a9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.809806] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.810345] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1764b78f-4192-424d-ac14-04cd891952d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.813516] env[68674]: DEBUG nova.policy [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09a599ad1fc548ae828a4f45c0ea1c27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '984922a84ae94240b8f0c176fa2da160', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 614.907205] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.907205] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.907205] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971 {{(pid=68674) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.907205] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38f3a008-65a4-4dbd-80ef-e90b67b9e525 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.915672] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 614.915672] env[68674]: value = "task-3239545" [ 614.915672] env[68674]: _type = "Task" [ 614.915672] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.932621] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.967221] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239542, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.095226] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239543, 'name': Rename_Task, 'duration_secs': 0.159913} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.095510] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 615.095769] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9ca33b3-d822-4f44-8a78-dcea075534ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.107822] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 615.107822] env[68674]: value = "task-3239546" [ 615.107822] env[68674]: _type = "Task" [ 615.107822] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.117115] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239546, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.161715] env[68674]: DEBUG nova.network.neutron [-] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.182958] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 615.188113] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Releasing lock "refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.280407] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3095bbd2-870e-45f9-8329-a3a0f080b6c8 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 32.647s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.316046] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01661eaa-055f-48e2-8419-3d0206fa9fd6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.327406] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa93f718-50ce-469a-97d1-bf249ea6db70 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.361068] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c0a186-d5de-46a6-a893-c7c6fcf16ecb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.370712] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff48b9e5-48fe-4388-90dc-3b261acebaa0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.386938] env[68674]: DEBUG nova.compute.provider_tree [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.426263] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239545, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.467579] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712618} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.467848] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 3463e09e-dc2f-432c-9eff-8192c2616240/3463e09e-dc2f-432c-9eff-8192c2616240.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 615.468097] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.468387] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2847d62-5853-4958-8e48-aaedc826f467 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.477809] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 615.477809] env[68674]: value = "task-3239547" [ 615.477809] env[68674]: _type = "Task" [ 615.477809] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.490544] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239547, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.524672] env[68674]: DEBUG nova.compute.manager [req-502e4419-a6b1-4d63-9921-0ba0325651b1 req-6166dd4b-d9d9-464c-a2a7-87ee690f99b8 service nova] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Received event network-vif-deleted-98baa11f-640b-4afd-98c6-363727b60907 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.558791] env[68674]: DEBUG nova.network.neutron [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Successfully updated port: 07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 615.620449] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239546, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.652762] env[68674]: DEBUG nova.network.neutron [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Successfully created port: c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.664478] env[68674]: INFO nova.compute.manager [-] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Took 1.31 seconds to deallocate network for instance. [ 615.784379] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 615.890554] env[68674]: DEBUG nova.scheduler.client.report [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.932118] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.538693} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.932118] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 615.932334] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 615.932574] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.988948] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072015} completed successfully. 
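The inventory dump above is what the resource tracker reports for provider ade3f042-7427-494b-9654-0b65e074850c. Usable capacity per resource class is commonly derived as (total - reserved) * allocation_ratio; the arithmetic below only sanity-checks the numbers in the dump and is not Placement's code:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0       -> 48 cores oversubscribed 4:1
    # MEMORY_MB 196078.0
    # DISK_GB 400.0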
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.988948] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.989978] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc43752-8a82-47ee-8f3c-48571b15815f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.018623] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 3463e09e-dc2f-432c-9eff-8192c2616240/3463e09e-dc2f-432c-9eff-8192c2616240.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.018623] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b84495b-fed6-40db-97d3-12cad13abc24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.043648] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 616.043648] env[68674]: value = "task-3239548" [ 616.043648] env[68674]: _type = "Task" [ 616.043648] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.056189] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239548, 'name': ReconfigVM_Task} progress is 6%. 
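The records above trace the usual vmwareapi root-disk path for instance 3463e09e-dc2f-432c-9eff-8192c2616240: the cached image vmdk is copied into the instance directory, the copy is extended to the flavor's root_gb, and a ReconfigVM_Task attaches it. The "1048576" in "Extending root virtual disk to 1048576" is simply the 1 GiB root disk expressed in KiB. A sketch of the conversion and the ordering, with placeholder callables standing in for the three vCenter tasks:

    def root_disk_kib(root_gb):
        # The extend step takes the new capacity in KiB.
        return root_gb * 1024 * 1024

    assert root_disk_kib(1) == 1048576   # matches the log line above

    def prepare_root_disk(copy_disk, extend_disk, attach_disk,
                          cached_vmdk, instance_vmdk, root_gb):
        # copy_disk / extend_disk / attach_disk are placeholders for the
        # CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task calls.
        copy_disk(cached_vmdk, instance_vmdk)
        extend_disk(instance_vmdk, root_disk_kib(root_gb))
        attach_disk(instance_vmdk)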
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.063906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "refresh_cache-627fb348-1749-4480-97b9-b479a182d4ee" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.063906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired lock "refresh_cache-627fb348-1749-4480-97b9-b479a182d4ee" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.063906] env[68674]: DEBUG nova.network.neutron [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 616.121732] env[68674]: DEBUG oslo_vmware.api [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239546, 'name': PowerOnVM_Task, 'duration_secs': 0.715021} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.122011] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.122223] env[68674]: INFO nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Took 9.40 seconds to spawn the instance on the hypervisor. 
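The lockutils lines report two timings per lock: how long the caller waited to acquire it and how long it was then held (e.g. "waited 0.000s", "held 32.647s"). A small stand-in built on a plain threading.Lock, purely to illustrate what those two numbers mean; it is not the oslo.concurrency implementation:

    import threading, time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t1))

    with timed_lock('refresh_cache-e75d2bc7-f356-4443-9641-d9ebf35843cd'):
        pass  # refresh the instance network info cache while holding the lock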
[ 616.122407] env[68674]: DEBUG nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.123208] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d780248e-33c9-4496-8032-efa938fa6526 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.171452] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.197523] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 616.198652] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 616.201255] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-558abbb7-b26f-4e43-a6a7-f0f1a3fb6643 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.213067] env[68674]: DEBUG oslo_vmware.api [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 616.213067] env[68674]: value = "task-3239550" [ 616.213067] env[68674]: _type = "Task" [ 616.213067] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.232442] env[68674]: DEBUG oslo_vmware.api [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239550, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.239473] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 616.239473] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.239473] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 616.239722] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.239722] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 616.239722] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 616.239722] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 616.239884] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 616.240106] env[68674]: DEBUG nova.virt.hardware [None 
req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 616.240361] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 616.240564] env[68674]: DEBUG nova.virt.hardware [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 616.241553] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24213d10-b74d-462d-9882-62726dcc0961 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.251254] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202b7e3a-c371-46ad-882a-4eea2e6812dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.316158] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.396153] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.223s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.399516] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.685s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.399516] env[68674]: DEBUG nova.objects.instance [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lazy-loading 'resources' on Instance uuid 58830b0e-dbf3-424d-8b9a-bb298b6bea21 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 616.422649] env[68674]: INFO nova.scheduler.client.report [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Deleted allocations for instance a123c5f2-e775-4dd2-9a5a-35e7d6705dfa [ 616.562615] env[68674]: DEBUG oslo_vmware.api [None 
req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239548, 'name': ReconfigVM_Task, 'duration_secs': 0.358367} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.562981] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 3463e09e-dc2f-432c-9eff-8192c2616240/3463e09e-dc2f-432c-9eff-8192c2616240.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 616.563726] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebdc4b70-3226-4279-88b0-3dcd27c83288 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.577313] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 616.577313] env[68674]: value = "task-3239551" [ 616.577313] env[68674]: _type = "Task" [ 616.577313] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.590724] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239551, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.635353] env[68674]: DEBUG nova.network.neutron [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.648061] env[68674]: INFO nova.compute.manager [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Took 26.89 seconds to build instance. [ 616.723196] env[68674]: DEBUG oslo_vmware.api [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239550, 'name': PowerOnVM_Task, 'duration_secs': 0.494406} completed successfully. 
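The nova.virt.hardware block earlier (flavor m1.nano, vcpus=1, no explicit limits, so sockets/cores/threads all default to a 65536 ceiling) ends with a single candidate, 1 socket x 1 core x 1 thread. A brute-force equivalent of that enumeration, not Nova's actual implementation, shows why only one triple survives for vcpus=1:

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) triple whose product is exactly vcpus
        # and that stays within the per-dimension limits.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- the single topology in the log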
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.723639] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.723831] env[68674]: DEBUG nova.compute.manager [None req-d918acb8-8b4e-4316-8403-9634f85c2553 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.724770] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64158be-084a-48c5-8aaf-a5b350c17364 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.890830] env[68674]: DEBUG nova.compute.manager [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.891862] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0425187-74cf-41f8-9988-d41b2c8ca739 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.929775] env[68674]: DEBUG nova.network.neutron [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Updating instance_info_cache with network_info: [{"id": "07dd5c65-5707-47fd-baf2-f100c76d737f", "address": "fa:16:3e:85:97:58", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07dd5c65-57", "ovs_interfaceid": "07dd5c65-5707-47fd-baf2-f100c76d737f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.932101] env[68674]: DEBUG oslo_concurrency.lockutils [None req-306128b1-e6b2-4c2f-9f69-950375da3874 tempest-ServerDiagnosticsV248Test-810017807 tempest-ServerDiagnosticsV248Test-810017807-project-member] Lock "a123c5f2-e775-4dd2-9a5a-35e7d6705dfa" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.852s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.982096] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 616.982460] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.982647] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 616.982855] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.984425] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 616.984425] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 616.984425] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 616.984425] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 616.984425] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 616.984688] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 616.984688] env[68674]: DEBUG nova.virt.hardware [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 616.986047] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a678bb-a0a0-4961-8bb9-e053f8607a05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.997905] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa6e661-c550-43d6-a74e-e3f122304b83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.015513] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:db:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '926b65c9-79eb-4f2d-88ef-f00c20e240f5', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.023666] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 617.023973] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 617.024250] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e029ef62-e8bd-4855-b916-e497aaa181a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.049615] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.049615] env[68674]: value = "task-3239552" [ 617.049615] env[68674]: _type = "Task" [ 617.049615] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.063028] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239552, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.092987] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239551, 'name': Rename_Task, 'duration_secs': 0.225983} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.093296] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.093812] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a4496f0-2d86-4b2d-90b5-b62badfe0f9d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.101054] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 617.101054] env[68674]: value = "task-3239553" [ 617.101054] env[68674]: _type = "Task" [ 617.101054] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.119862] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239553, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.149246] env[68674]: DEBUG oslo_concurrency.lockutils [None req-049d4f8a-1722-426d-b99d-ba090265bef6 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.910s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.419582] env[68674]: INFO nova.compute.manager [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] instance snapshotting [ 617.427802] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593246fe-fc96-46f8-8f1a-bc010426b655 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.433882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Releasing lock "refresh_cache-627fb348-1749-4480-97b9-b479a182d4ee" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.434107] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Instance network_info: |[{"id": "07dd5c65-5707-47fd-baf2-f100c76d737f", "address": "fa:16:3e:85:97:58", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07dd5c65-57", "ovs_interfaceid": "07dd5c65-5707-47fd-baf2-f100c76d737f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 617.449313] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:97:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '07dd5c65-5707-47fd-baf2-f100c76d737f', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.458956] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Creating folder: Project (7d1b163422ef4e798ffc2ef3c5c7d2ba). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.460415] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12eef8db-8fd7-4825-b782-a9db79d709f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.463204] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c57f30-1d92-4110-a92d-3319ce65f817 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.478245] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Created folder: Project (7d1b163422ef4e798ffc2ef3c5c7d2ba) in parent group-v647377. [ 617.481769] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Creating folder: Instances. Parent ref: group-v647450. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.481769] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9853e708-e2ab-42da-9e42-8379a22a3a46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.493937] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Created folder: Instances in parent group-v647450. [ 617.493937] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 617.493937] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 617.494167] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faeb253b-af16-4904-b490-d7d4986761ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.519801] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.519801] env[68674]: value = "task-3239556" [ 617.519801] env[68674]: _type = "Task" [ 617.519801] env[68674]: } to complete. 
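The network_info blobs cached above are lists of VIF dictionaries; the pieces the driver actually pulls out for its VIF info (port id, MAC, fixed IPs, logical-switch id) sit a couple of levels deep. A minimal reader for that shape, using values from the dump above as sample data:

    # Illustrative only: summarize one VIF entry from a network_info-style list.
    def summarize_vif(vif):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return {"port_id": vif["id"], "mac": vif["address"], "ips": ips}

    network_info = [{
        "id": "07dd5c65-5707-47fd-baf2-f100c76d737f",
        "address": "fa:16:3e:85:97:58",
        "network": {"subnets": [{"ips": [{"address": "192.168.233.24"}]}]},
    }]
    print([summarize_vif(v) for v in network_info])
    # [{'port_id': '07dd5c65-...', 'mac': 'fa:16:3e:85:97:58', 'ips': ['192.168.233.24']}]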
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.533945] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239556, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.542574] env[68674]: DEBUG nova.network.neutron [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Successfully updated port: c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 617.562337] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239552, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.599423] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6ae803-2522-4263-96ca-b2857157d56a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.612054] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af510cfd-2f35-40cc-9606-dfd68f00f013 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.621874] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239553, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.653133] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 617.656535] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2895cc0-eb51-45f7-a3b1-31776ccc137a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.666089] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d198a04a-a497-4f43-bd56-d5efeee28d24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.682673] env[68674]: DEBUG nova.compute.provider_tree [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.982399] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 617.982399] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6922e5cf-2f09-48c0-9ee5-b4706c4e0df2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.995589] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 617.995589] env[68674]: value = "task-3239557" [ 617.995589] env[68674]: _type = "Task" [ 617.995589] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.006257] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239557, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.032626] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239556, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.045929] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "refresh_cache-f9168b78-ed64-4109-84f0-db0af61d2f10" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.045929] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquired lock "refresh_cache-f9168b78-ed64-4109-84f0-db0af61d2f10" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.045929] env[68674]: DEBUG nova.network.neutron [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.064960] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239552, 'name': CreateVM_Task, 'duration_secs': 0.574396} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.065279] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 618.066524] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.066779] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.067223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 618.068292] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b4d987e-dce3-4464-9daf-09a1f946fb3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.079527] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 618.079527] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5264ddf2-c5fe-4ce1-368a-d232cf5a3103" [ 
618.079527] env[68674]: _type = "Task" [ 618.079527] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.090659] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5264ddf2-c5fe-4ce1-368a-d232cf5a3103, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.120857] env[68674]: DEBUG oslo_vmware.api [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239553, 'name': PowerOnVM_Task, 'duration_secs': 0.912697} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.121251] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 618.121448] env[68674]: INFO nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Took 8.67 seconds to spawn the instance on the hypervisor. [ 618.121723] env[68674]: DEBUG nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 618.123205] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777f7f63-4de5-4377-804d-60b35427e3be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.188761] env[68674]: DEBUG nova.scheduler.client.report [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 618.194611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.212019] env[68674]: DEBUG nova.compute.manager 
[req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Received event network-vif-plugged-07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 618.213048] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Acquiring lock "627fb348-1749-4480-97b9-b479a182d4ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.215217] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Lock "627fb348-1749-4480-97b9-b479a182d4ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.215217] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Lock "627fb348-1749-4480-97b9-b479a182d4ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.215217] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] No waiting events found dispatching network-vif-plugged-07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 618.215217] env[68674]: WARNING nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Received unexpected event network-vif-plugged-07dd5c65-5707-47fd-baf2-f100c76d737f for instance with vm_state building and task_state spawning. [ 618.215217] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Received event network-changed-07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 618.215424] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Refreshing instance network info cache due to event network-changed-07dd5c65-5707-47fd-baf2-f100c76d737f. 
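The req-791b088d-... block shows the external-event handshake: Neutron reports network-vif-plugged-07dd5c65-..., the compute manager looks for a waiter registered for that (instance, event) pair, finds none ("No waiting events found dispatching"), and logs the event as unexpected because the instance is still building. A toy version of that pop-or-warn pattern, using a plain dict of threading.Event objects rather than Nova's InstanceEvents machinery:

    import threading

    class EventWaiters:
        def __init__(self):
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev            # caller wait()s on this before proceeding

        def pop(self, instance_uuid, event_name):
            ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print('No waiting events found dispatching %s' % event_name)
                return
            ev.set()             # wakes whoever registered the expectation

    events = EventWaiters()
    events.pop('627fb348-1749-4480-97b9-b479a182d4ee',
               'network-vif-plugged-07dd5c65-5707-47fd-baf2-f100c76d737f')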
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 618.215424] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Acquiring lock "refresh_cache-627fb348-1749-4480-97b9-b479a182d4ee" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.215424] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Acquired lock "refresh_cache-627fb348-1749-4480-97b9-b479a182d4ee" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.215424] env[68674]: DEBUG nova.network.neutron [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Refreshing network info cache for port 07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.508761] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239557, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.532175] env[68674]: DEBUG nova.compute.manager [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Received event network-changed-0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 618.533552] env[68674]: DEBUG nova.compute.manager [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Refreshing instance network info cache due to event network-changed-0ef8fcab-d9ac-4837-bdf3-84d597114e1d. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 618.533552] env[68674]: DEBUG oslo_concurrency.lockutils [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] Acquiring lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.534042] env[68674]: DEBUG oslo_concurrency.lockutils [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] Acquired lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.534042] env[68674]: DEBUG nova.network.neutron [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Refreshing network info cache for port 0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.542415] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239556, 'name': CreateVM_Task, 'duration_secs': 0.723336} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.542531] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 618.543321] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.594383] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5264ddf2-c5fe-4ce1-368a-d232cf5a3103, 'name': SearchDatastore_Task, 'duration_secs': 0.017479} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.594383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.594383] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.594383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.594630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.594630] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.594630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.594630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 618.594630] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71e44401-c218-44ea-bef4-d432a5734802 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.594971] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-954271db-89b9-49fe-8bea-1bdb06da4116 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.602182] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 618.602182] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d48c36-151c-b106-26de-e3597466de02" [ 618.602182] env[68674]: _type = "Task" [ 618.602182] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.603224] env[68674]: DEBUG nova.network.neutron [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.611707] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.611964] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 618.614115] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdb9f3b6-6d08-425c-a75d-da4de6c7d12e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.625899] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 618.625899] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5236ef17-8a8e-1b6b-a44e-3286c537abbe" [ 618.625899] env[68674]: _type = "Task" [ 618.625899] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.626216] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d48c36-151c-b106-26de-e3597466de02, 'name': SearchDatastore_Task, 'duration_secs': 0.013076} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.626695] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.627298] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.627298] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.641560] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5236ef17-8a8e-1b6b-a44e-3286c537abbe, 'name': SearchDatastore_Task, 'duration_secs': 0.01193} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.642547] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44c41c7b-6454-47c9-8417-90a7558c6e2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.652407] env[68674]: INFO nova.compute.manager [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Took 24.05 seconds to build instance. [ 618.658277] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 618.658277] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c89d64-fed0-c70c-1bc4-009f05368cbb" [ 618.658277] env[68674]: _type = "Task" [ 618.658277] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.668467] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c89d64-fed0-c70c-1bc4-009f05368cbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.696477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.699332] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.344s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.699696] env[68674]: DEBUG nova.objects.instance [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lazy-loading 'resources' on Instance uuid d1c7a508-7d45-4eff-bb06-b85bfe392772 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 618.735839] env[68674]: INFO nova.scheduler.client.report [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Deleted allocations for instance 58830b0e-dbf3-424d-8b9a-bb298b6bea21 [ 618.997113] env[68674]: DEBUG nova.network.neutron [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Updating instance_info_cache with network_info: [{"id": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d", "address": "fa:16:3e:4d:39:f3", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0118b5a-b3", "ovs_interfaceid": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.017042] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239557, 'name': CreateSnapshot_Task, 'duration_secs': 0.781749} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.018638] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 619.019654] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6223f4-15bb-4c9a-9e3d-f7d2dfed554f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.157558] env[68674]: DEBUG oslo_concurrency.lockutils [None req-207da15f-50eb-424c-9a66-d694a66f30d5 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "3463e09e-dc2f-432c-9eff-8192c2616240" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.821s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.173595] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c89d64-fed0-c70c-1bc4-009f05368cbb, 'name': SearchDatastore_Task, 'duration_secs': 0.012772} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.176247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.176696] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.177454] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.177645] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.178408] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04ef1fb8-aecd-4a13-b431-b9b04313f3ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.180498] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e0719ce-6365-4966-bb32-b5c999540677 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.192802] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 619.192802] env[68674]: value = "task-3239559" [ 619.192802] env[68674]: _type = "Task" [ 619.192802] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.192802] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.192802] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.197148] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdaba423-e777-4b23-939b-bd7bc9ae0a1b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.214223] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 619.214223] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252f928-285b-a91e-6f51-57426db47754" [ 619.214223] env[68674]: _type = "Task" [ 619.214223] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.214223] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239559, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.214223] env[68674]: DEBUG nova.network.neutron [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Updated VIF entry in instance network info cache for port 07dd5c65-5707-47fd-baf2-f100c76d737f. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 619.214433] env[68674]: DEBUG nova.network.neutron [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Updating instance_info_cache with network_info: [{"id": "07dd5c65-5707-47fd-baf2-f100c76d737f", "address": "fa:16:3e:85:97:58", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07dd5c65-57", "ovs_interfaceid": "07dd5c65-5707-47fd-baf2-f100c76d737f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.248012] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252f928-285b-a91e-6f51-57426db47754, 'name': SearchDatastore_Task, 'duration_secs': 0.03507} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.248501] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86d8f11e-4187-4e90-9b6c-2a0a5c4d9342 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639 tempest-FloatingIPsAssociationNegativeTestJSON-1819660639-project-member] Lock "58830b0e-dbf3-424d-8b9a-bb298b6bea21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.002s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.250485] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33c1fe6c-ad06-4f64-88d4-6ae2ae44491e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.258061] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 619.258061] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d061a5-430b-8769-5275-a6641aa8607f" [ 619.258061] env[68674]: _type = "Task" [ 619.258061] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.273836] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d061a5-430b-8769-5275-a6641aa8607f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.442432] env[68674]: DEBUG nova.network.neutron [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updated VIF entry in instance network info cache for port 0ef8fcab-d9ac-4837-bdf3-84d597114e1d. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 619.444247] env[68674]: DEBUG nova.network.neutron [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updating instance_info_cache with network_info: [{"id": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "address": "fa:16:3e:01:82:11", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ef8fcab-d9", "ovs_interfaceid": "0ef8fcab-d9ac-4837-bdf3-84d597114e1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.505148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Releasing lock "refresh_cache-f9168b78-ed64-4109-84f0-db0af61d2f10" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.505528] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Instance network_info: |[{"id": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d", "address": "fa:16:3e:4d:39:f3", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0118b5a-b3", "ovs_interfaceid": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 619.507118] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:39:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0118b5a-b34d-4c54-8270-5f4ce3b9e18d', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.514780] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Creating folder: Project (984922a84ae94240b8f0c176fa2da160). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.518534] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6536b672-e277-4e33-ae83-5dad348cc8f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.535559] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Created folder: Project (984922a84ae94240b8f0c176fa2da160) in parent group-v647377. [ 619.535559] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Creating folder: Instances. Parent ref: group-v647454. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.545598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 619.545892] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f82dfc09-75e7-4e73-9c3d-18984253dc7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.548722] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e0732c35-d8e5-4858-91a5-3b2dae2d0796 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.560595] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 619.560595] env[68674]: value = "task-3239562" [ 619.560595] env[68674]: _type = "Task" [ 619.560595] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.568273] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Created folder: Instances in parent group-v647454. [ 619.568663] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.569174] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.569790] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e02c4568-75c8-487d-88b0-cc1a3d38d2ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.591910] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239562, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.600671] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.600671] env[68674]: value = "task-3239563" [ 619.600671] env[68674]: _type = "Task" [ 619.600671] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.638893] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239563, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.668284] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 619.708627] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239559, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.720269] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Releasing lock "refresh_cache-627fb348-1749-4480-97b9-b479a182d4ee" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.720269] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Received event network-changed-28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.720269] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Refreshing instance network info cache due to event network-changed-28a81236-a1d4-4c87-86fb-5ad97008d9bb. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 619.720619] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Acquiring lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.720619] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Acquired lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.720705] env[68674]: DEBUG nova.network.neutron [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Refreshing network info cache for port 28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.779664] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d061a5-430b-8769-5275-a6641aa8607f, 'name': SearchDatastore_Task, 'duration_secs': 0.018211} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.779664] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.779664] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 627fb348-1749-4480-97b9-b479a182d4ee/627fb348-1749-4480-97b9-b479a182d4ee.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.779664] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23173a85-ab28-4283-bca7-54bd41313fea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.791549] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 619.791549] env[68674]: value = "task-3239564" [ 619.791549] env[68674]: _type = "Task" [ 619.791549] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.815505] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239564, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.863047] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859c29a2-2b54-43c4-93c2-e7637329e564 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.874650] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e17d96b-4f03-43ff-b481-5f3ef42b44f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.919651] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fd59c2-13e3-4a49-a635-d1c7cbe08af3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.930698] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4119d502-fd01-448a-8986-abaedf4e5ce1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.951803] env[68674]: DEBUG oslo_concurrency.lockutils [req-016de102-9c32-4d9d-9a85-278b217a8a91 req-1c5d0fd4-789b-48b7-804f-de527bef0eda service nova] Releasing lock "refresh_cache-fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.953051] env[68674]: DEBUG nova.compute.provider_tree [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.074258] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239562, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.113675] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239563, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.200340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.212358] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239559, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748259} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.212358] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.213992] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.213992] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50a51969-c851-45e3-81bb-e52f9dee8873 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.228988] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 620.228988] env[68674]: value = "task-3239566" [ 620.228988] env[68674]: _type = "Task" [ 620.228988] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.248167] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239566, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.312666] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239564, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.459659] env[68674]: DEBUG nova.scheduler.client.report [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.576759] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239562, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.601560] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "505b0352-39ab-4841-8766-14626af2b13e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.601791] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "505b0352-39ab-4841-8766-14626af2b13e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.617628] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239563, 'name': CreateVM_Task, 'duration_secs': 0.729105} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.619515] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.620478] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.620678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.620995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 620.621300] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffd599a0-c81e-49db-aa9b-dc93cd7a7b50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.629729] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 620.629729] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524b30a0-1071-c22b-6730-9d7835d1a370" [ 620.629729] env[68674]: _type = "Task" [ 620.629729] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.642455] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524b30a0-1071-c22b-6730-9d7835d1a370, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.703526] env[68674]: DEBUG nova.network.neutron [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Updated VIF entry in instance network info cache for port 28a81236-a1d4-4c87-86fb-5ad97008d9bb. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.703907] env[68674]: DEBUG nova.network.neutron [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Updating instance_info_cache with network_info: [{"id": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "address": "fa:16:3e:41:a3:bb", "network": {"id": "d227e0e9-9502-4493-be37-6a2c081b8bc2", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1059296354-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74387cd2a0404c98929ed093c4f70b62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28a81236-a1", "ovs_interfaceid": "28a81236-a1d4-4c87-86fb-5ad97008d9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.745225] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239566, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125089} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.745225] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.745225] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30b874b-209e-4c4e-998c-8f9b4cfbe67c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.770226] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.770226] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87cd4893-bbc5-4437-b9cf-7e423d152cf5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.798288] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 620.798288] env[68674]: value = "task-3239567" [ 620.798288] env[68674]: _type = "Task" [ 620.798288] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.811381] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239567, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.815639] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239564, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631247} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.815639] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 627fb348-1749-4480-97b9-b479a182d4ee/627fb348-1749-4480-97b9-b479a182d4ee.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.815639] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.815639] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd195fa6-93b4-4ada-8997-4e62bb61c7ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.823633] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 620.823633] env[68674]: value = "task-3239568" [ 620.823633] env[68674]: _type = "Task" [ 620.823633] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.835562] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239568, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.970517] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.267s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.970517] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.021s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.970517] env[68674]: DEBUG nova.objects.instance [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lazy-loading 'resources' on Instance uuid 89ccc16e-d0e5-4f7d-985c-8693188e7ed5 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 620.989974] env[68674]: DEBUG nova.compute.manager [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Received event network-changed-c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 620.990204] env[68674]: DEBUG nova.compute.manager [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Refreshing instance network info cache due to event network-changed-c0118b5a-b34d-4c54-8270-5f4ce3b9e18d. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 620.990440] env[68674]: DEBUG oslo_concurrency.lockutils [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] Acquiring lock "refresh_cache-f9168b78-ed64-4109-84f0-db0af61d2f10" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.990693] env[68674]: DEBUG oslo_concurrency.lockutils [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] Acquired lock "refresh_cache-f9168b78-ed64-4109-84f0-db0af61d2f10" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.990775] env[68674]: DEBUG nova.network.neutron [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Refreshing network info cache for port c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.995755] env[68674]: INFO nova.scheduler.client.report [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Deleted allocations for instance d1c7a508-7d45-4eff-bb06-b85bfe392772 [ 621.076751] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239562, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.145177] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524b30a0-1071-c22b-6730-9d7835d1a370, 'name': SearchDatastore_Task, 'duration_secs': 0.021144} completed successfully. 
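Interleaved with the vCenter traffic, the lockutils records above ("Acquiring lock", "Acquired lock ... waited 17.021s", '"released" ... held 2.267s') come from oslo.concurrency, which logs every acquire and release along with wait and hold times. A minimal sketch of the two idioms that produce those lines, assuming nothing beyond oslo.concurrency itself; the lock names mirror the log, the guarded bodies are placeholders.

```python
# Sketch of the oslo.concurrency usage behind the lock records above.
from oslo_concurrency import lockutils


# Context-manager form: matches the 'Acquiring/Acquired/Releasing lock
# "refresh_cache-<instance uuid>"' triplets around the network info refresh.
def refresh_instance_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder: refresh the instance network info cache here


# Decorator form: matches the "compute_resources" lock held around
# ResourceTracker.update_usage / instance_claim in the log.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder: adjust tracked CPU/RAM/disk usage here
```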
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.145578] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.145913] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.146358] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.146661] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.146888] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.147373] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6b82fd5-0531-41ce-b4b9-c4782162d719 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.167019] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.167019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 621.167019] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2efa4a5d-dafb-4957-b9fc-f0de26c2afd8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.173662] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 621.173662] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525378c7-ee3e-dc13-236f-71ce0392ed0b" [ 621.173662] env[68674]: _type = "Task" [ 621.173662] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.182830] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525378c7-ee3e-dc13-236f-71ce0392ed0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.208788] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Releasing lock "refresh_cache-ae945f3f-fde8-4b25-a5bd-81014fc99690" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.209116] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Received event network-vif-plugged-c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 621.209392] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Acquiring lock "f9168b78-ed64-4109-84f0-db0af61d2f10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.209667] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.209868] env[68674]: DEBUG oslo_concurrency.lockutils [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.214029] env[68674]: DEBUG nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] No waiting events found dispatching network-vif-plugged-c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 621.214029] env[68674]: WARNING nova.compute.manager [req-791b088d-4aec-4338-a743-78dd5241956d req-ed2bcdf2-3ebd-4b6f-894e-0fa0fecc5e7c service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Received unexpected event network-vif-plugged-c0118b5a-b34d-4c54-8270-5f4ce3b9e18d for instance with vm_state building and task_state spawning. [ 621.313983] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239567, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.334355] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.332043} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.334705] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.335543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69f6f01-27e8-49f4-ae67-fae7b217099e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.364295] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 627fb348-1749-4480-97b9-b479a182d4ee/627fb348-1749-4480-97b9-b479a182d4ee.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.364446] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05683087-83e9-4444-998d-cf49d5addb91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.388697] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 621.388697] env[68674]: value = "task-3239569" [ 621.388697] env[68674]: _type = "Task" [ 621.388697] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.401620] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239569, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.511030] env[68674]: DEBUG oslo_concurrency.lockutils [None req-590e8e33-94c0-4866-9706-0165d13ef7e1 tempest-ServerExternalEventsTest-1160184302 tempest-ServerExternalEventsTest-1160184302-project-member] Lock "d1c7a508-7d45-4eff-bb06-b85bfe392772" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.928s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.579669] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239562, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.693508] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525378c7-ee3e-dc13-236f-71ce0392ed0b, 'name': SearchDatastore_Task, 'duration_secs': 0.03232} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.694371] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0775f63-5ff0-40a5-893a-70913e63f861 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.703655] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 621.703655] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52773998-f0a4-8e7b-45e9-90b48ed83fb6" [ 621.703655] env[68674]: _type = "Task" [ 621.703655] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.712628] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52773998-f0a4-8e7b-45e9-90b48ed83fb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.822341] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239567, 'name': ReconfigVM_Task, 'duration_secs': 0.945401} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.822341] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971/8790d635-fec5-4dcf-8cb0-220c2edec971.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.824206] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f385ac06-acb0-4584-a59f-3ab5882cbee7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.835122] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 621.835122] env[68674]: value = "task-3239570" [ 621.835122] env[68674]: _type = "Task" [ 621.835122] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.845486] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239570, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.904369] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239569, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.955917] env[68674]: DEBUG nova.network.neutron [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Updated VIF entry in instance network info cache for port c0118b5a-b34d-4c54-8270-5f4ce3b9e18d. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 621.955917] env[68674]: DEBUG nova.network.neutron [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Updating instance_info_cache with network_info: [{"id": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d", "address": "fa:16:3e:4d:39:f3", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0118b5a-b3", "ovs_interfaceid": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.084762] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239562, 'name': CloneVM_Task, 'duration_secs': 2.129274} completed successfully. 
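The instance_info_cache update above embeds a complete network_info VIF entry. When reading such dumps, a handful of fields is usually enough to identify the port; the sketch below copies a trimmed version of the entry shown in the log and pulls those fields out. The helper is illustrative, not part of Nova's API.

```python
# Trimmed copy of the VIF entry logged above, plus an illustrative helper
# that extracts the fields most often needed when reading such dumps.
vif = {
    "id": "c0118b5a-b34d-4c54-8270-5f4ce3b9e18d",
    "address": "fa:16:3e:4d:39:f3",
    "network": {
        "id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd",
        "bridge": "br-int",
        "label": "shared",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.177", "type": "fixed"}],
        }],
    },
    "type": "ovs",
    "devname": "tapc0118b5a-b3",
    "active": True,
}


def summarize_vif(entry):
    """Return (port_id, mac, fixed_ips, devname) for one network_info entry."""
    fixed_ips = [ip["address"]
                 for subnet in entry["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip.get("type") == "fixed"]
    return entry["id"], entry["address"], fixed_ips, entry.get("devname")


print(summarize_vif(vif))
# ('c0118b5a-b34d-4c54-8270-5f4ce3b9e18d', 'fa:16:3e:4d:39:f3',
#  ['192.168.233.177'], 'tapc0118b5a-b3')
```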
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.085818] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Created linked-clone VM from snapshot [ 622.086798] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c4cbdc-23d5-45ba-810f-5650a96e9a50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.096464] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Uploading image 6da24b67-71a4-4c41-b215-b9f29de7fe8d {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 622.119201] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 622.119726] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c5c18e56-9026-4088-a792-b79f5f5f0e5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.131591] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 622.131591] env[68674]: value = "task-3239571" [ 622.131591] env[68674]: _type = "Task" [ 622.131591] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.143583] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef81e33-e314-453f-bcf1-03ec7fc12d2c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.152924] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239571, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.159066] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99233053-cd19-4859-9d4f-66853c3d85e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.198679] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b50866-c03c-4e91-8946-3bf0936e1be4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.212243] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f925d97-1d0a-4ebc-8e02-e313ec590cf0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.230885] env[68674]: DEBUG nova.compute.provider_tree [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 622.235461] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52773998-f0a4-8e7b-45e9-90b48ed83fb6, 'name': SearchDatastore_Task, 'duration_secs': 0.019154} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.235968] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.236260] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f9168b78-ed64-4109-84f0-db0af61d2f10/f9168b78-ed64-4109-84f0-db0af61d2f10.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.236762] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc67b645-1f25-48d8-ace3-37a78c297c76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.245671] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 622.245671] env[68674]: value = "task-3239572" [ 622.245671] env[68674]: _type = "Task" [ 622.245671] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.256280] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.350927] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239570, 'name': Rename_Task, 'duration_secs': 0.356454} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.351352] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.351681] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dc989e0-6e4f-408d-84f4-99f1a984a48c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.361693] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 622.361693] env[68674]: value = "task-3239574" [ 622.361693] env[68674]: _type = "Task" [ 622.361693] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.372842] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.403804] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239569, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.459133] env[68674]: DEBUG oslo_concurrency.lockutils [req-58428914-99dc-48e6-bdf5-0974842c64b5 req-386acbbb-8086-411a-9d9d-2ec28a578134 service nova] Releasing lock "refresh_cache-f9168b78-ed64-4109-84f0-db0af61d2f10" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.649872] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239571, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.759480] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.770749] env[68674]: ERROR nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] [req-3c8aab5c-689e-49fa-8e87-707d11b40ddb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3c8aab5c-689e-49fa-8e87-707d11b40ddb"}]} [ 622.793074] env[68674]: DEBUG nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 622.809833] env[68674]: DEBUG nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 622.810078] env[68674]: DEBUG nova.compute.provider_tree [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 622.825355] env[68674]: DEBUG nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 622.853261] env[68674]: DEBUG nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 622.876234] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239574, 'name': PowerOnVM_Task} progress is 66%. 
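The ERROR above is the normal placement generation-conflict path: the inventory PUT carried a stale resource_provider_generation, placement answered 409 placement.concurrent_update, and the report client refreshed the provider and retried. A minimal sketch of that refresh-and-retry loop against the placement HTTP API; the endpoint, microversion header and retry count are illustrative, and Nova's real report client uses a keystoneauth session rather than plain requests.

```python
# Sketch of a refresh-and-retry loop for the "placement.concurrent_update"
# 409 seen above. Endpoint, headers and retry count are illustrative.
import requests

PLACEMENT = 'http://placement.example.test'   # placeholder endpoint
HEADERS = {'OpenStack-API-Version': 'placement 1.26'}


def set_inventory(rp_uuid, inventories, sess=None, max_attempts=3):
    sess = sess or requests.Session()  # placeholder: no auth configured
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(max_attempts):
        # Refresh the provider generation first (the "Refreshing inventories
        # for resource provider ..." step in the log).
        current = sess.get(url, headers=HEADERS).json()
        body = {
            'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = sess.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation between our GET and PUT, so loop and try again.
    raise RuntimeError('gave up after repeated generation conflicts')
```

In the log this retry succeeds: the provider is refreshed at generation 44 and the subsequent update bumps it to 45.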
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.903634] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239569, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.149710] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239571, 'name': Destroy_Task, 'duration_secs': 0.769889} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.150048] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Destroyed the VM [ 623.150394] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 623.152322] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f1a2be62-05ca-435c-8082-78ecfed5f9f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.162300] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 623.162300] env[68674]: value = "task-3239575" [ 623.162300] env[68674]: _type = "Task" [ 623.162300] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.174279] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239575, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.263265] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239572, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.379854] env[68674]: DEBUG oslo_vmware.api [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239574, 'name': PowerOnVM_Task, 'duration_secs': 0.830323} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.383679] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 623.384292] env[68674]: DEBUG nova.compute.manager [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 623.385717] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4490d3-6402-4828-8521-eaafb0082de7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.415779] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239569, 'name': ReconfigVM_Task, 'duration_secs': 1.628331} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.416026] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 627fb348-1749-4480-97b9-b479a182d4ee/627fb348-1749-4480-97b9-b479a182d4ee.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 623.416738] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38d0d37a-e693-4773-9db0-0edda0b36528 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.429868] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 623.429868] env[68674]: value = "task-3239576" [ 623.429868] env[68674]: _type = "Task" [ 623.429868] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.446551] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239576, 'name': Rename_Task} progress is 5%. 
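Once the disk is attached, the spawn path above finishes each instance the same way: ReconfigVM_Task, then Rename_Task, then PowerOnVM_Task, each waited on, followed by a PropertyCollector read to check the power state. A condensed sketch of that tail, assuming an existing oslo_vmware session and a VM moref obtained elsewhere; the function name and new VM name are placeholders.

```python
# Condensed sketch of the rename / power-on / state-check tail seen above.
# `session` is assumed to be an oslo_vmware.api.VMwareAPISession and `vm_ref`
# the managed object reference of the VM, both obtained elsewhere.
from oslo_vmware import vim_util


def rename_and_power_on(session, vm_ref, new_name):
    vim = session.vim

    # VirtualMachine.Rename_Task, then poll it (the Rename_Task records).
    task = session.invoke_api(vim, 'Rename_Task', vm_ref, newName=new_name)
    session.wait_for_task(task)

    # VirtualMachine.PowerOnVM_Task, then poll it (the PowerOnVM_Task records).
    task = session.invoke_api(vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

    # Read back runtime.powerState, roughly what the final
    # PropertyCollector.RetrievePropertiesEx / "Checking state" step does.
    return session.invoke_api(vim_util, 'get_object_property', vim,
                              vm_ref, 'runtime.powerState')
```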
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.478620] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43902c7-a9bd-40c2-83a9-3de7b55dfc13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.491012] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d997b593-3102-4d3c-bc1e-83663da17b9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.539753] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd487f57-4e92-4597-bd0a-fb4135ecab95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.545857] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2a5997-9f78-4822-bbdf-340e2138e833 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.565072] env[68674]: DEBUG nova.compute.provider_tree [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 623.679508] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239575, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.773487] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239572, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.313617} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.774497] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f9168b78-ed64-4109-84f0-db0af61d2f10/f9168b78-ed64-4109-84f0-db0af61d2f10.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.774897] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.775760] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c892c39-ee9f-4177-aaaa-c7cb6b78fc05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.785633] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 623.785633] env[68674]: value = "task-3239577" [ 623.785633] env[68674]: _type = "Task" [ 623.785633] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.799294] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239577, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.918132] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.944470] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239576, 'name': Rename_Task, 'duration_secs': 0.31147} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.944790] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 623.946554] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3c90f3a-9f0c-407a-8cbc-029e306facef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.956326] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 623.956326] env[68674]: value = "task-3239578" [ 623.956326] env[68674]: _type = "Task" [ 623.956326] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.970924] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.112150] env[68674]: DEBUG nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 44 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 624.112542] env[68674]: DEBUG nova.compute.provider_tree [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 44 to 45 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 624.112701] env[68674]: DEBUG nova.compute.provider_tree [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 624.177596] env[68674]: DEBUG 
oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239575, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.311340] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239577, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079965} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.311340] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.311340] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708e6ba0-1092-4b17-ab04-57addf1da5fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.351016] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] f9168b78-ed64-4109-84f0-db0af61d2f10/f9168b78-ed64-4109-84f0-db0af61d2f10.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.351016] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-713d1ffe-c29a-40e6-963c-400a93f55aee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.381131] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 624.381131] env[68674]: value = "task-3239579" [ 624.381131] env[68674]: _type = "Task" [ 624.381131] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.400421] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239579, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.474731] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239578, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.621477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.653s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.627131] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.595s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.628744] env[68674]: INFO nova.compute.claims [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.653630] env[68674]: INFO nova.scheduler.client.report [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Deleted allocations for instance 89ccc16e-d0e5-4f7d-985c-8693188e7ed5 [ 624.685934] env[68674]: DEBUG oslo_vmware.api [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239575, 'name': RemoveSnapshot_Task, 'duration_secs': 1.259276} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.690016] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 624.899585] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239579, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.970697] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239578, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.167198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-54305f27-131a-4611-aa87-e08a63516404 tempest-ServersAdmin275Test-711209233 tempest-ServersAdmin275Test-711209233-project-member] Lock "89ccc16e-d0e5-4f7d-985c-8693188e7ed5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.800s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.196369] env[68674]: WARNING nova.compute.manager [None req-45a1966e-bced-4717-812f-c2f711671660 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Image not found during snapshot: nova.exception.ImageNotFound: Image 6da24b67-71a4-4c41-b215-b9f29de7fe8d could not be found. [ 625.393638] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239579, 'name': ReconfigVM_Task, 'duration_secs': 0.678492} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.393638] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Reconfigured VM instance instance-00000018 to attach disk [datastore2] f9168b78-ed64-4109-84f0-db0af61d2f10/f9168b78-ed64-4109-84f0-db0af61d2f10.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.394328] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d24f0238-695c-45e7-8c8a-2cedfc28ca5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.402979] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 625.402979] env[68674]: value = "task-3239580" [ 625.402979] env[68674]: _type = "Task" [ 625.402979] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.415676] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239580, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.429896] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.430415] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.471924] env[68674]: DEBUG oslo_vmware.api [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239578, 'name': PowerOnVM_Task, 'duration_secs': 1.293421} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.472224] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 625.472435] env[68674]: INFO nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Took 12.06 seconds to spawn the instance on the hypervisor. [ 625.472618] env[68674]: DEBUG nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 625.473415] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacb6422-aca4-422a-84ac-a1120d9305d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.923194] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239580, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.996769] env[68674]: INFO nova.compute.manager [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Took 29.09 seconds to build instance. 
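The entries above show oslo.vmware's task-polling loop: each vCenter call (ReconfigVM_Task, PowerOnVM_Task, RemoveSnapshot_Task, Rename_Task, ...) returns a task handle that nova waits on, and _poll_task logs the reported progress until the task completes successfully. A minimal stand-alone sketch of that poll-until-done pattern, using a hypothetical FakeTask object instead of a live vCenter session (wait_for_task below is illustrative only and is not the oslo.vmware implementation):

import time

class FakeTask:
    """Stand-in for a vCenter task handle; not an oslo.vmware class."""
    def __init__(self, progress_steps):
        self._steps = list(progress_steps)   # successive progress values, e.g. [5, 66, 99, 100]
        self._i = 0

    def poll(self):
        progress = self._steps[min(self._i, len(self._steps) - 1)]
        self._i += 1
        return ("success" if progress >= 100 else "running"), progress

def wait_for_task(task, interval=0.5):
    """Poll until the task reports success, logging progress in the same
    spirit as the _poll_task entries above (sketch only)."""
    while True:
        state, progress = task.poll()
        print(f"Task progress is {progress}%.")
        if state == "success":
            print("Task completed successfully.")
            return
        time.sleep(interval)

wait_for_task(FakeTask([5, 66, 99, 100]), interval=0)
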
[ 626.204564] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8da106-edb3-43cb-83aa-ab7ab62a5328 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.216722] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b90ef6-f240-4411-b9aa-4ccfde6582e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.256147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "8790d635-fec5-4dcf-8cb0-220c2edec971" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.256496] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.257118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "8790d635-fec5-4dcf-8cb0-220c2edec971-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.257118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.257118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.259956] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484fa6d4-2ddd-4f51-80cc-9e2cd919fc60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.263186] env[68674]: INFO nova.compute.manager [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Terminating instance [ 626.275211] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a92ac476-6054-4761-9a6e-01a48df50054 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.295305] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.419014] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239580, 'name': Rename_Task, 'duration_secs': 0.791648} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.419014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.419014] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c413bf8-5bc2-450e-9a95-152b8229a6e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.428185] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 626.428185] env[68674]: value = "task-3239581" [ 626.428185] env[68674]: _type = "Task" [ 626.428185] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.441438] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239581, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.501294] env[68674]: DEBUG oslo_concurrency.lockutils [None req-df2a99a2-913b-4d02-a9da-f9ece4c15f5b tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "627fb348-1749-4480-97b9-b479a182d4ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.305s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.770016] env[68674]: DEBUG nova.compute.manager [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 626.770304] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 626.771246] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39908f9d-ea1b-4abf-bff0-ba3ffae2bdcb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.782888] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 626.783243] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ae989b3-2a06-484c-9604-29256572eb56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.792062] env[68674]: DEBUG oslo_vmware.api [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 626.792062] env[68674]: value = "task-3239582" [ 626.792062] env[68674]: _type = "Task" [ 626.792062] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.800805] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.810089] env[68674]: DEBUG oslo_vmware.api [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.942338] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239581, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.007523] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 627.058614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.058877] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.059111] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.059311] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.059476] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.064576] env[68674]: INFO nova.compute.manager [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Terminating instance [ 627.132899] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "1189fa93-608b-4684-a675-f1caf29a9f43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.133818] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "1189fa93-608b-4684-a675-f1caf29a9f43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.304423] env[68674]: DEBUG oslo_vmware.api [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239582, 'name': PowerOffVM_Task, 'duration_secs': 0.24974} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.304837] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 627.305186] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 627.305553] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96de3e4c-2913-4765-8599-2b418bc10856 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.314192] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.314192] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 627.315758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.573s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.319226] env[68674]: INFO nova.compute.claims [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.384483] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 627.384783] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 627.385019] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] 8790d635-fec5-4dcf-8cb0-220c2edec971 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 627.385355] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b132151-9f39-4d7e-a36a-fe3cf27e717c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.394150] env[68674]: DEBUG oslo_vmware.api [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 627.394150] env[68674]: value = "task-3239584" [ 627.394150] env[68674]: _type = "Task" [ 627.394150] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.404475] env[68674]: DEBUG oslo_vmware.api [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.442584] env[68674]: DEBUG oslo_vmware.api [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239581, 'name': PowerOnVM_Task, 'duration_secs': 0.593238} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.442924] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.443172] env[68674]: INFO nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Took 11.24 seconds to spawn the instance on the hypervisor. [ 627.443395] env[68674]: DEBUG nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.444185] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba8e677-4c58-4090-8382-317a61aa5457 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.539426] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.575342] env[68674]: DEBUG nova.compute.manager [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 627.575575] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 627.576539] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b6b8aa-9786-4cbc-90ff-cbdea6468bad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.588546] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 627.588813] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ef3aa45-a047-4ff3-82db-fdc93206878f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.597871] env[68674]: DEBUG oslo_vmware.api [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 627.597871] env[68674]: value = "task-3239585" [ 627.597871] env[68674]: _type = "Task" [ 627.597871] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.607799] env[68674]: DEBUG oslo_vmware.api [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239585, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.698047] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.698345] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.830164] env[68674]: DEBUG nova.compute.utils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 627.836298] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 627.836566] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 627.905338] env[68674]: DEBUG oslo_vmware.api [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312984} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.905470] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 627.905602] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 627.905792] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 627.906018] env[68674]: INFO nova.compute.manager [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Took 1.14 seconds to destroy the instance on the hypervisor. [ 627.906296] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 627.906539] env[68674]: DEBUG nova.compute.manager [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 627.906671] env[68674]: DEBUG nova.network.neutron [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.928977] env[68674]: DEBUG nova.policy [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6a2f94537e34cfd98e83f6af711087d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2974aa035d5d4591827e7713a403116d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 627.969248] env[68674]: INFO nova.compute.manager [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Took 27.74 seconds to build instance. 
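The lockutils lines throughout this run pair an 'Acquiring lock' message with 'acquired ... waited Ns' and, later, '"released" ... held Ns'. A rough illustrative stand-in that produces the same shape of output with a plain threading.Lock (timed_lock is hypothetical; oslo_concurrency.lockutils works differently internally, e.g. it also supports fair and file-based external locks):

import threading
import time
from contextlib import contextmanager

_locks = {}   # lock name -> threading.Lock, shared within the process

@contextmanager
def timed_lock(name, owner):
    """Report wait/hold times in the same shape as the lockutils entries above.
    Illustrative stand-in, not oslo_concurrency.lockutils itself."""
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    start_wait = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{owner}" :: '
          f'waited {time.monotonic() - start_wait:.3f}s')
    start_hold = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: '
              f'held {time.monotonic() - start_hold:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)   # stand-in for the resource claim work
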
[ 628.109508] env[68674]: DEBUG oslo_vmware.api [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239585, 'name': PowerOffVM_Task, 'duration_secs': 0.293969} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.109788] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 628.109963] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 628.110265] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9265f335-bfb9-4ec3-99aa-7aa56569a7a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.228884] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 628.229018] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 628.231703] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleting the datastore file [datastore2] 60ded0c9-7e20-4071-b5ce-9189d8d01d5c {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 628.231703] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c1498b2-0c99-4540-b629-5d118f77a267 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.241037] env[68674]: DEBUG oslo_vmware.api [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 628.241037] env[68674]: value = "task-3239587" [ 628.241037] env[68674]: _type = "Task" [ 628.241037] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.252893] env[68674]: DEBUG oslo_vmware.api [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239587, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.291340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.292497] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.355068] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 628.435207] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.435207] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.435207] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.435207] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.435408] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.437451] env[68674]: INFO nova.compute.manager [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Terminating instance [ 628.473465] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9fa7fb80-88ed-4524-9a09-d141d4ddec02 tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.133s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.764348] env[68674]: DEBUG oslo_vmware.api [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200465} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.764603] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 628.764777] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 628.764954] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 628.765183] env[68674]: INFO nova.compute.manager [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 628.765429] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 628.765872] env[68674]: DEBUG nova.compute.manager [-] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 628.766589] env[68674]: DEBUG nova.network.neutron [-] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 628.953392] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Successfully created port: 3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.959095] env[68674]: DEBUG nova.compute.manager [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 628.959095] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 628.962687] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa55845-09a1-4662-aa40-5d27d82a16fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.979546] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 628.985919] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 628.986438] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2eab19de-0ee4-4357-a21c-913e2598f99b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.998048] env[68674]: DEBUG oslo_vmware.api [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 628.998048] env[68674]: value = "task-3239588" [ 628.998048] env[68674]: _type = "Task" [ 628.998048] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.015634] env[68674]: DEBUG oslo_vmware.api [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.066087] env[68674]: DEBUG nova.compute.manager [None req-0fe55e14-b6c7-4d2b-b817-4e61f3f3e7e2 tempest-ServerDiagnosticsTest-740736325 tempest-ServerDiagnosticsTest-740736325-project-admin] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.069112] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e45b06-28cf-4d11-81fa-2110623a86cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.075568] env[68674]: INFO nova.compute.manager [None req-0fe55e14-b6c7-4d2b-b817-4e61f3f3e7e2 tempest-ServerDiagnosticsTest-740736325 tempest-ServerDiagnosticsTest-740736325-project-admin] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Retrieving diagnostics [ 629.076151] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2229f81b-8010-4c2d-9b09-a6e0ff9b09ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.185540] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4056552b-7394-4b53-aa01-5c160d051f5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.194070] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4af093d-bccb-41db-9cb4-ff536a81ba5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.234926] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f392b1c4-1042-4c4b-a25d-e27e96e3d56f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.246036] env[68674]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e75eba6-7433-40e9-89e8-0b54bfbe411d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.251221] env[68674]: DEBUG nova.network.neutron [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.262579] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.382405] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 629.427009] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 629.427292] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.427445] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 629.427617] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 
tempest-ListServersNegativeTestJSON-2025065306-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.427756] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 629.427894] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 629.428125] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 629.428470] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 629.428703] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 629.428872] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 629.429052] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 629.429909] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdc10b4-900d-48ce-9796-d41d06f352f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.438837] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fd0b96-cb24-4f87-8537-b5bf683638ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.513472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.513780] env[68674]: DEBUG oslo_vmware.api [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239588, 'name': PowerOffVM_Task, 'duration_secs': 0.379799} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.514080] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 629.514290] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 629.514578] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89153fe6-6005-4b0f-a9b7-93b0e20b0480 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.739058] env[68674]: DEBUG nova.network.neutron [-] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.754725] env[68674]: INFO nova.compute.manager [-] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Took 1.85 seconds to deallocate network for instance. [ 629.795041] env[68674]: ERROR nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [req-bdc6bff3-d33a-45cc-92a6-3b481f8f4583] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bdc6bff3-d33a-45cc-92a6-3b481f8f4583"}]} [ 629.830093] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 629.836369] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 629.836768] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 629.837300] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleting the datastore file [datastore1] 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 629.843349] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb3b5e85-4a61-4dcd-ad3d-5cf064c3cf85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.850683] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 629.851935] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.855179] env[68674]: DEBUG oslo_vmware.api [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 629.855179] env[68674]: value = "task-3239590" [ 629.855179] env[68674]: _type = "Task" [ 629.855179] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.870068] env[68674]: DEBUG oslo_vmware.api [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.874884] env[68674]: DEBUG nova.compute.manager [req-ea5c1079-9bfb-4afe-8011-883e0c0cf812 req-366f1110-ec2b-4f86-8023-d407c8826e55 service nova] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Received event network-vif-deleted-926b65c9-79eb-4f2d-88ef-f00c20e240f5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 629.879403] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 629.907217] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 630.765025] env[68674]: INFO nova.compute.manager [-] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Took 2.00 seconds to deallocate network for instance. 
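
The wait_for_task pattern from oslo.vmware shows up throughout this trace: a vCenter call returns a task reference (here task-3239590 for the datastore file deletion) and a loop polls it, logging "progress is 0%" until the task reports success. A generic polling loop with the same shape; the poll callable and its dict layout are assumptions for illustration, not the oslo.vmware API:

    import time

    def wait_for_task(poll, interval=0.5, timeout=300):
        # `poll` is a caller-supplied callable returning something like
        # {"state": "running", "progress": 0} for the task being watched.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(interval)
        raise TimeoutError("task did not complete within %ss" % timeout)
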
[ 630.765025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.773708] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Successfully updated port: 3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 630.779398] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.779628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.793083] env[68674]: DEBUG oslo_vmware.api [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.421217} completed successfully. 
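
The oslo.concurrency lines ("Acquiring lock ... by ...", "acquired ... waited 0.000s", later "released ... held N.NNNs") are named in-process locks serializing work such as per-host resource tracker updates and per-instance builds. A toy equivalent that reproduces the waited/held bookkeeping; the single flat registry is a simplification, not oslo.concurrency itself:

    import threading
    import time
    from contextlib import contextmanager

    _registry = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def named_lock(name, holder):
        # Look up (or create) the lock for this name, report how long we waited for
        # it, and on the way out report how long we held it.
        with _registry_guard:
            lock = _registry.setdefault(name, threading.Lock())
        start = time.monotonic()
        acquired_at = None
        try:
            with lock:
                acquired_at = time.monotonic()
                print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, holder, acquired_at - start))
                yield
        finally:
            if acquired_at is not None:
                print('Lock "%s" released by "%s" :: held %.3fs' % (name, holder, time.monotonic() - acquired_at))

    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        pass   # claim work would happen here
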
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.793769] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 630.793769] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 630.793769] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 630.793886] env[68674]: INFO nova.compute.manager [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Took 1.84 seconds to destroy the instance on the hypervisor. [ 630.794127] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 630.794752] env[68674]: DEBUG nova.compute.manager [-] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 630.794752] env[68674]: DEBUG nova.network.neutron [-] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 630.876019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "3c8459db-cc54-4644-8e4c-83c87017a186" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.876019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "3c8459db-cc54-4644-8e4c-83c87017a186" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.273186] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "f9168b78-ed64-4109-84f0-db0af61d2f10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.273186] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.273186] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "f9168b78-ed64-4109-84f0-db0af61d2f10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.273546] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.273546] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock 
"f9168b78-ed64-4109-84f0-db0af61d2f10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.275448] env[68674]: INFO nova.compute.manager [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Terminating instance [ 631.282270] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.282270] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "refresh_cache-a62237a7-a123-4378-b655-d489ef08474b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.282360] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "refresh_cache-a62237a7-a123-4378-b655-d489ef08474b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.282705] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 631.294244] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d893ab9b-3c50-435f-bb9f-c24680e3c495 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.303645] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa7523b-2be0-4707-833d-fd813a147b0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.341676] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f396e62e-6b93-4ed2-bddd-b3146dea841a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.350080] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298de8dc-4db9-4d96-84bb-cf7fe5fb994c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.365995] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 631.692532] env[68674]: DEBUG nova.network.neutron [-] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.782932] env[68674]: DEBUG nova.compute.manager [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 631.783181] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 631.784058] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe87b3f8-0d18-41c7-8f8b-653d00218874 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.806043] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 631.809919] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45aa84fb-1cc6-4467-b15a-bc1130427829 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.823012] env[68674]: DEBUG oslo_vmware.api [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for the task: (returnval){ [ 631.823012] env[68674]: value = "task-3239591" [ 631.823012] env[68674]: _type = "Task" [ 631.823012] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.839315] env[68674]: DEBUG oslo_vmware.api [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239591, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.874138] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.899997] env[68674]: ERROR nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [req-8bf1bc9d-d16b-412f-911a-7f1e97d9dda9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8bf1bc9d-d16b-412f-911a-7f1e97d9dda9"}]} [ 631.920764] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 631.946580] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 631.946810] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 631.969615] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 631.995344] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba 
tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 632.190021] env[68674]: DEBUG nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Received event network-vif-deleted-00325c29-c6d8-4e3b-9410-aac7c5b9d5d2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.191268] env[68674]: DEBUG nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Received event network-vif-plugged-3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.191513] env[68674]: DEBUG oslo_concurrency.lockutils [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] Acquiring lock "a62237a7-a123-4378-b655-d489ef08474b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.192092] env[68674]: DEBUG oslo_concurrency.lockutils [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] Lock "a62237a7-a123-4378-b655-d489ef08474b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.192277] env[68674]: DEBUG oslo_concurrency.lockutils [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] Lock "a62237a7-a123-4378-b655-d489ef08474b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.192464] env[68674]: DEBUG nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] No waiting events found dispatching network-vif-plugged-3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 632.192807] env[68674]: WARNING nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Received unexpected event network-vif-plugged-3be77e6c-a481-49ae-b015-141b0a40fe08 for instance with vm_state building and task_state spawning. 
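
The req-daf1b360 entries show Neutron's external events reaching the compute manager: for each event it takes the per-instance "-events" lock, pops a matching waiter if the build registered one, and otherwise logs the WARNING about an unexpected network-vif-plugged while the instance is still building/spawning. A small model of that pop-or-warn dispatch; the class and method names are made up for illustration:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # A build registers the events it expects; an incoming notification either
        # completes a waiter or is reported as unexpected.
        def __init__(self):
            self._lock = threading.Lock()
            self._waiting = defaultdict(dict)   # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiting[instance_uuid][event_name] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiting[instance_uuid].pop(event_name, None)
            if waiter is None:
                print("WARNING: unexpected event %s for instance %s" % (event_name, instance_uuid))
                return False
            waiter.set()
            return True
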
[ 632.192986] env[68674]: DEBUG nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Received event network-changed-3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.193156] env[68674]: DEBUG nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Refreshing instance network info cache due to event network-changed-3be77e6c-a481-49ae-b015-141b0a40fe08. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 632.193368] env[68674]: DEBUG oslo_concurrency.lockutils [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] Acquiring lock "refresh_cache-a62237a7-a123-4378-b655-d489ef08474b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.201059] env[68674]: INFO nova.compute.manager [-] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Took 1.40 seconds to deallocate network for instance. [ 632.214218] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Updating instance_info_cache with network_info: [{"id": "3be77e6c-a481-49ae-b015-141b0a40fe08", "address": "fa:16:3e:98:15:44", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3be77e6c-a4", "ovs_interfaceid": "3be77e6c-a481-49ae-b015-141b0a40fe08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.338592] env[68674]: DEBUG oslo_vmware.api [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239591, 'name': PowerOffVM_Task, 'duration_secs': 0.394184} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.339177] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 632.339480] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 632.339817] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f68ef3c3-e600-4b4a-8028-1b9fe7c391e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.392913] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.392913] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.415891] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 632.416193] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 632.416419] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Deleting the datastore file [datastore2] f9168b78-ed64-4109-84f0-db0af61d2f10 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 632.416675] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04a7a402-da78-491d-8b62-079eebca8a49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.426254] env[68674]: DEBUG oslo_vmware.api [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] 
Waiting for the task: (returnval){ [ 632.426254] env[68674]: value = "task-3239593" [ 632.426254] env[68674]: _type = "Task" [ 632.426254] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.438961] env[68674]: DEBUG oslo_vmware.api [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239593, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.665038] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ba7524-ecfe-4d0e-b039-48d03204639c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.673632] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e3d1d8-269c-4738-86ba-697cf942aea9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.706654] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2196bd93-d006-42ee-87ce-070b29929c3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.711974] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.715381] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0c52b2-2c99-4804-aab3-9a6ac1ee24e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.719643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "refresh_cache-a62237a7-a123-4378-b655-d489ef08474b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.719974] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Instance network_info: |[{"id": "3be77e6c-a481-49ae-b015-141b0a40fe08", "address": "fa:16:3e:98:15:44", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3be77e6c-a4", "ovs_interfaceid": "3be77e6c-a481-49ae-b015-141b0a40fe08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 632.720261] env[68674]: DEBUG oslo_concurrency.lockutils [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] Acquired lock "refresh_cache-a62237a7-a123-4378-b655-d489ef08474b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.720457] env[68674]: DEBUG nova.network.neutron [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Refreshing network info cache for port 3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 632.721611] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:15:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3be77e6c-a481-49ae-b015-141b0a40fe08', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 632.729362] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Creating folder: Project (2974aa035d5d4591827e7713a403116d). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 632.730446] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8680c2ae-92c3-4a00-ac40-737786b92e19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.741320] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 632.750296] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Created folder: Project (2974aa035d5d4591827e7713a403116d) in parent group-v647377. [ 632.750501] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Creating folder: Instances. Parent ref: group-v647458. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 632.751304] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d19dd50-1156-410a-b027-284f093cd9dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.762031] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Created folder: Instances in parent group-v647458. [ 632.762293] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 632.762486] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a62237a7-a123-4378-b655-d489ef08474b] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 632.762720] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ae02a6f-518d-4d5a-9d7f-e1e17caeb8dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.784520] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 632.784520] env[68674]: value = "task-3239596" [ 632.784520] env[68674]: _type = "Task" [ 632.784520] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.794815] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239596, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.937494] env[68674]: DEBUG oslo_vmware.api [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Task: {'id': task-3239593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.361487} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.937819] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 632.938019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 632.938186] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 632.938365] env[68674]: INFO nova.compute.manager [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Took 1.16 seconds to destroy the instance on the hypervisor. [ 632.938600] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 632.938789] env[68674]: DEBUG nova.compute.manager [-] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 632.938873] env[68674]: DEBUG nova.network.neutron [-] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.285490] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 633.285490] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 52 to 53 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 633.285490] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 633.303074] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239596, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.796217] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.480s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.796217] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Start building networks asynchronously for instance. 
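
On the success path the generation handling is the mirror image of the earlier 409: the PUT made at generation 52 is accepted, and the local provider view advances to 53 before the next update. A minimal sketch of that bookkeeping, assuming the "+1 per accepted change" behaviour visible in the log (not Nova's ProviderTree class):

    class ProviderCache:
        # Minimal local view of one resource provider's inventory and generation.
        def __init__(self, uuid, generation):
            self.uuid = uuid
            self.generation = generation
            self.inventory = {}

        def record_successful_update(self, inventory, generation_used):
            # Placement accepted a change made at `generation_used`, so the provider
            # is now one generation further along.
            self.inventory = dict(inventory)
            self.generation = generation_used + 1

    cache = ProviderCache("ade3f042-7427-494b-9654-0b65e074850c", 52)
    cache.record_successful_update({"VCPU": {"total": 48}}, generation_used=52)
    print(cache.generation)   # 53
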
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 633.802619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.909s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.803540] env[68674]: INFO nova.compute.claims [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.806119] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239596, 'name': CreateVM_Task, 'duration_secs': 0.552199} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.806633] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a62237a7-a123-4378-b655-d489ef08474b] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 633.807308] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.808397] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.808397] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 633.808397] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d5fd264-147c-42d5-9827-9ed6fe5b78b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.816721] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 633.816721] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f6ce6-2ba7-3847-d29f-6bf07575ae38" [ 633.816721] env[68674]: _type = "Task" [ 633.816721] env[68674]: } to complete. 
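
The "Claim successful on node domain-c8..." line is the resource tracker admitting a build while holding the compute_resources lock: the requested vCPUs, RAM and disk are checked against the provider inventory (totals, reserved amounts and allocation ratios reported earlier in this log) and recorded as usage. A toy admission check using those same numbers; the field names and structure are illustrative, not the ResourceTracker API:

    def try_claim(inventory, usage, request):
        # Admit the request only if every resource class still has headroom, then
        # record the new usage.
        for rc, want in request.items():
            inv = inventory[rc]
            capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
            if usage.get(rc, 0) + want > capacity:
                return False
        for rc, want in request.items():
            usage[rc] = usage.get(rc, 0) + want
        return True

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    usage = {}
    print(try_claim(inventory, usage, {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}))   # True for m1.nano
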
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.826755] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f6ce6-2ba7-3847-d29f-6bf07575ae38, 'name': SearchDatastore_Task, 'duration_secs': 0.010262} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.827040] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.827367] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.829860] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.829860] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.829860] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.829860] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac845ae3-1052-4d30-a5aa-9fb4ca7795c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.838710] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.838908] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 633.839655] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9898a24-c4d8-4a1e-9f45-79d09ef06c63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.845524] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 633.845524] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b24587-4dbb-b5b5-ea7d-bf1cdf46b35d" [ 633.845524] env[68674]: _type = "Task" [ 633.845524] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.854950] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b24587-4dbb-b5b5-ea7d-bf1cdf46b35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.917187] env[68674]: DEBUG nova.network.neutron [-] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.937486] env[68674]: DEBUG nova.network.neutron [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Updated VIF entry in instance network info cache for port 3be77e6c-a481-49ae-b015-141b0a40fe08. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 633.937857] env[68674]: DEBUG nova.network.neutron [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Updating instance_info_cache with network_info: [{"id": "3be77e6c-a481-49ae-b015-141b0a40fe08", "address": "fa:16:3e:98:15:44", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3be77e6c-a4", "ovs_interfaceid": "3be77e6c-a481-49ae-b015-141b0a40fe08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.309501] env[68674]: DEBUG nova.compute.utils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 634.316608] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 634.317153] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 634.320771] env[68674]: DEBUG nova.compute.manager [req-d0e7bad8-5888-459c-a7eb-1d586ce0120d req-377cf7c1-c439-438d-8d0b-6c3e37e6e69d service nova] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Received event network-vif-deleted-c0118b5a-b34d-4c54-8270-5f4ce3b9e18d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.362088] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b24587-4dbb-b5b5-ea7d-bf1cdf46b35d, 'name': SearchDatastore_Task, 'duration_secs': 0.009903} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.362698] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f07ed91a-1da1-4d8c-bdb5-81e9e0d1b33b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.370373] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 634.370373] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523cc09f-cf8d-2228-5ab2-4a645fd6a91f" [ 634.370373] env[68674]: _type = "Task" [ 634.370373] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.379256] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523cc09f-cf8d-2228-5ab2-4a645fd6a91f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.419138] env[68674]: INFO nova.compute.manager [-] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Took 1.48 seconds to deallocate network for instance. [ 634.424306] env[68674]: DEBUG nova.policy [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6a2f94537e34cfd98e83f6af711087d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2974aa035d5d4591827e7713a403116d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 634.446028] env[68674]: DEBUG oslo_concurrency.lockutils [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] Releasing lock "refresh_cache-a62237a7-a123-4378-b655-d489ef08474b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.446028] env[68674]: DEBUG nova.compute.manager [req-daf1b360-c01d-4aba-a8dc-1e924c73c97b req-aa68f5d0-ef88-4fa2-9c70-10a5e0c60067 service nova] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Received event network-vif-deleted-40a38082-1691-4d4b-9fce-f07687409a92 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.817599] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 634.886489] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523cc09f-cf8d-2228-5ab2-4a645fd6a91f, 'name': SearchDatastore_Task, 'duration_secs': 0.013502} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.886910] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.887747] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a62237a7-a123-4378-b655-d489ef08474b/a62237a7-a123-4378-b655-d489ef08474b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.890976] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d14d6bf-38e4-4720-87fe-1e7baf111721 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.903877] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 634.903877] env[68674]: value = "task-3239597" [ 634.903877] env[68674]: _type = "Task" [ 634.903877] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.915553] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239597, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.935611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.974643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "6803af03-b1d5-47e6-9471-5213469e4103" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.974985] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "6803af03-b1d5-47e6-9471-5213469e4103" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.055126] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Successfully created port: 1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 635.426887] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239597, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.433836] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.434104] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.631042] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8177a109-8ac1-4096-b46c-6237fd383b7d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.640039] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff72639-d50d-466f-abe1-ac1344e2cc31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.680543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9cf0fb-9516-45ea-9a5a-58cd9ad3b25d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.689480] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed111e0-23e6-4fa2-b271-67b58cfaaffe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.705661] env[68674]: DEBUG nova.compute.provider_tree [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.833297] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 635.863400] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 635.863643] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 635.863800] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 635.863980] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 635.865224] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 635.865403] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 635.865622] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 635.865784] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 635.865950] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 635.866155] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 635.866352] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 635.870801] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a882d7-7331-48e7-8eea-13a846d580eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.877082] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb0640b-8c38-4627-9ba3-c3b1340da1ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.916700] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239597, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532276} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.916961] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a62237a7-a123-4378-b655-d489ef08474b/a62237a7-a123-4378-b655-d489ef08474b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.917184] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.917424] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32aa996b-209c-46a4-8386-f7f02dfaa92e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.927517] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 635.927517] env[68674]: value = "task-3239598" [ 635.927517] env[68674]: _type = "Task" [ 635.927517] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.941262] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239598, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.209578] env[68674]: DEBUG nova.scheduler.client.report [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 636.438813] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239598, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073532} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.439463] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.440921] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53092177-0368-4fd2-9f28-140fe17225ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.472824] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] a62237a7-a123-4378-b655-d489ef08474b/a62237a7-a123-4378-b655-d489ef08474b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.472824] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cccbb916-0dd0-43b3-a7a0-e76593c60456 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.495453] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 636.495453] env[68674]: value = "task-3239599" [ 636.495453] env[68674]: _type = "Task" [ 636.495453] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.504084] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239599, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.638817] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.639202] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.659387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "1699f556-d451-40e3-a213-7edb753b03f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.659625] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "1699f556-d451-40e3-a213-7edb753b03f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.697325] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.697546] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.716523] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.914s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.716739] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 636.720468] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.472s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.720468] env[68674]: DEBUG nova.objects.instance [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lazy-loading 'resources' on Instance uuid e84db5bd-b6ec-42ef-9c34-a4160c44d973 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 636.785131] env[68674]: DEBUG nova.compute.manager [req-0516e26e-af21-4f83-8ace-7097fbe56a7f req-d7754c01-5f75-4e67-8f7e-fee457e2b399 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Received event network-vif-plugged-1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 636.785337] env[68674]: DEBUG oslo_concurrency.lockutils [req-0516e26e-af21-4f83-8ace-7097fbe56a7f req-d7754c01-5f75-4e67-8f7e-fee457e2b399 service nova] Acquiring lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.785663] env[68674]: DEBUG oslo_concurrency.lockutils [req-0516e26e-af21-4f83-8ace-7097fbe56a7f req-d7754c01-5f75-4e67-8f7e-fee457e2b399 service nova] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.785904] env[68674]: DEBUG oslo_concurrency.lockutils [req-0516e26e-af21-4f83-8ace-7097fbe56a7f req-d7754c01-5f75-4e67-8f7e-fee457e2b399 service nova] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.786145] env[68674]: DEBUG nova.compute.manager [req-0516e26e-af21-4f83-8ace-7097fbe56a7f req-d7754c01-5f75-4e67-8f7e-fee457e2b399 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] No waiting events found dispatching network-vif-plugged-1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 636.787169] env[68674]: WARNING nova.compute.manager [req-0516e26e-af21-4f83-8ace-7097fbe56a7f req-d7754c01-5f75-4e67-8f7e-fee457e2b399 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Received unexpected event network-vif-plugged-1602cb92-5f66-425c-a152-a0fdd777da11 for instance with vm_state building and task_state spawning. 
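
The "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" records interleaved above come from oslo.concurrency's lockutils, which the service uses to serialize work on shared state such as "compute_resources", the per-instance event list, and the "refresh_cache-<uuid>" network-info caches. A minimal sketch of the two usage forms behind those messages follows; the function names and lock names are illustrative placeholders, not Nova code.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Decorator form: oslo.concurrency logs "Lock ... acquired by ... waited Ns"
    # before this body runs and "released ... held Ns" afterwards.
    print(f"claiming resources for {instance_uuid}")


def refresh_network_cache(instance_uuid):
    # Context-manager form, analogous to the "refresh_cache-<uuid>" locks in the
    # log; it emits the "Acquiring lock" / "Releasing lock" debug records.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"refreshing network info cache for {instance_uuid}")


if __name__ == "__main__":
    claim_resources("b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14")
    refresh_network_cache("b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14")
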
[ 636.883284] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Successfully updated port: 1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 637.009476] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239599, 'name': ReconfigVM_Task, 'duration_secs': 0.357096} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.010452] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Reconfigured VM instance instance-00000019 to attach disk [datastore2] a62237a7-a123-4378-b655-d489ef08474b/a62237a7-a123-4378-b655-d489ef08474b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 637.013666] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4b2e283-d5a7-4d04-a1d6-744aeca148ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.021711] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 637.021711] env[68674]: value = "task-3239600" [ 637.021711] env[68674]: _type = "Task" [ 637.021711] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.035131] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239600, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.148837] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.149041] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.149200] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.149353] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.149501] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.149647] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.149776] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 637.149922] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.222696] env[68674]: DEBUG nova.compute.utils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 637.224227] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 637.224410] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 637.285568] env[68674]: DEBUG nova.policy [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6a2f94537e34cfd98e83f6af711087d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2974aa035d5d4591827e7713a403116d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 637.385657] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "refresh_cache-b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.385657] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "refresh_cache-b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.385657] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.536503] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239600, 'name': Rename_Task, 'duration_secs': 0.146692} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.539738] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 637.540290] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9691a187-d2e0-47e9-9b8c-db12b428a4ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.549087] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 637.549087] env[68674]: value = "task-3239601" [ 637.549087] env[68674]: _type = "Task" [ 637.549087] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.564279] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239601, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.653515] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.696813] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Successfully created port: 04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.730012] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 637.927524] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897a2c5b-e2d7-4971-9160-e71cd130cb35 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.945211] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4459f8ad-48d1-45af-a2e8-4486a00b2f19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.979936] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.985015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee87f0c9-7104-4e94-91c5-13437f7e6eee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.992666] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751adf7c-7a59-4730-9bea-48f68ee69f2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.014839] env[68674]: DEBUG nova.compute.provider_tree [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.061023] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239601, 'name': PowerOnVM_Task, 'duration_secs': 0.471677} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.061333] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.061588] env[68674]: INFO nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Took 8.68 seconds to spawn the instance on the hypervisor. 
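
The records above repeat a single oslo.vmware pattern: invoke a vSphere API method that returns a Task ("Invoking VirtualMachine.PowerOnVM_Task with opID=..."), then poll it until completion ("Waiting for the task", "progress is N%", "Task ... completed successfully"). A minimal sketch of that pattern, assuming an oslo_vmware.api.VMwareAPISession and an already-resolved VirtualMachine managed object reference; the endpoint, credentials, and constructor keyword names below are my assumptions for illustration, not values taken from this log.

from oslo_vmware import api as vmware_api


def power_on_vm(session, vm_ref):
    # invoke_api issues the SOAP call (logged as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=...") and returns a
    # Task managed object reference.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task (the "progress is N%" records) and returns
    # the task info on success, raising if vCenter reports the task as failed.
    return session.wait_for_task(task_ref)


if __name__ == "__main__":
    # Placeholder vCenter endpoint and credentials.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',
        server_username='devstack',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5,
    )
    # vm_ref would be a VirtualMachine managed object reference looked up
    # separately (e.g. from the instance's stored moref); omitted here.
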
[ 638.061775] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.062657] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff7248b-d2d6-4946-8d52-7d9e3974aaf6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.237849] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Updating instance_info_cache with network_info: [{"id": "1602cb92-5f66-425c-a152-a0fdd777da11", "address": "fa:16:3e:1e:eb:0d", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1602cb92-5f", "ovs_interfaceid": "1602cb92-5f66-425c-a152-a0fdd777da11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.520957] env[68674]: DEBUG nova.scheduler.client.report [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 638.585445] env[68674]: INFO nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Took 33.59 seconds to build instance. [ 638.740780] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 638.743406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "refresh_cache-b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.743696] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Instance network_info: |[{"id": "1602cb92-5f66-425c-a152-a0fdd777da11", "address": "fa:16:3e:1e:eb:0d", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1602cb92-5f", "ovs_interfaceid": "1602cb92-5f66-425c-a152-a0fdd777da11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 638.744341] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:eb:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1602cb92-5f66-425c-a152-a0fdd777da11', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.751977] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 638.754529] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 638.754529] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22a5be02-ba4a-46dc-b4bf-df64cbebb3b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.781346] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 638.781614] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.781770] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 638.781946] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.782103] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 638.782252] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 638.782458] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 638.782620] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 638.782782] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 638.782938] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 638.784224] env[68674]: DEBUG nova.virt.hardware [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 638.785133] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f366f1-77b2-4301-b5e0-60133761b593 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.788834] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.788834] env[68674]: value = "task-3239602" [ 638.788834] env[68674]: _type = "Task" [ 638.788834] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.796139] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fb3b75-44a6-4d5b-b776-7aa2091fc49e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.803501] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239602, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.918889] env[68674]: DEBUG nova.compute.manager [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Received event network-changed-1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.919082] env[68674]: DEBUG nova.compute.manager [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Refreshing instance network info cache due to event network-changed-1602cb92-5f66-425c-a152-a0fdd777da11. 
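Note: the hardware.py entries above show the CPU topology selection inputs for this flavor and image: no preferred or mandated topology (0:0:0 everywhere), default limits of 65536 sockets, cores and threads, and a single vCPU, so the only admissible topology is 1 socket, 1 core, 1 thread. The sketch below is a simplified enumeration of that constraint check, not Nova's actual _get_possible_cpu_topologies:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield (sockets, cores, threads) triples whose product equals the
        # vCPU count and that stay within the logged limits.
        upper = lambda cap: range(1, min(vcpus, cap) + 1)
        for s, c, t in itertools.product(upper(max_sockets),
                                         upper(max_cores),
                                         upper(max_threads)):
            if s * c * t == vcpus:
                yield (s, c, t)

    # Matches "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    assert list(possible_topologies(1)) == [(1, 1, 1)]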
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 638.919311] env[68674]: DEBUG oslo_concurrency.lockutils [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] Acquiring lock "refresh_cache-b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.919667] env[68674]: DEBUG oslo_concurrency.lockutils [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] Acquired lock "refresh_cache-b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.919667] env[68674]: DEBUG nova.network.neutron [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Refreshing network info cache for port 1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.026654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.307s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.029663] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.171s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.029959] env[68674]: DEBUG nova.objects.instance [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lazy-loading 'resources' on Instance uuid 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 639.060581] env[68674]: INFO nova.scheduler.client.report [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted allocations for instance e84db5bd-b6ec-42ef-9c34-a4160c44d973 [ 639.089320] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "a62237a7-a123-4378-b655-d489ef08474b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.395s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.300609] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239602, 'name': CreateVM_Task, 'duration_secs': 0.394699} completed successfully. 
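Note: the CreateVM_Task lines follow the oslo.vmware task-waiting pattern visible throughout this log: the SOAP call returns a Task object, wait_for_task polls it (the repeated "_poll_task ... progress is N%" records), and the wait returns once the task reports success, here after 0.394699s. A minimal sketch of that polling loop, using a hypothetical poll_state callable rather than the real oslo.vmware internals:

    import time

    def wait_for_task(poll_state, interval=0.5, timeout=300.0):
        # poll_state() is assumed to return (state, progress, error), e.g.
        # ("running", 6, None) or ("success", 100, None), mirroring the
        # progress percentages printed by _poll_task above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = poll_state()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("vCenter task failed: %s" % error)
            time.sleep(interval)
        raise TimeoutError("vCenter task did not complete within %ss" % timeout)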
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.300820] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.301594] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.301774] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.302103] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 639.302404] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-735d324f-a867-45e1-9cb2-1675b0db88cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.310156] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 639.310156] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274c72f-b0e3-dd7e-f8cd-db074e9e853b" [ 639.310156] env[68674]: _type = "Task" [ 639.310156] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.324978] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274c72f-b0e3-dd7e-f8cd-db074e9e853b, 'name': SearchDatastore_Task, 'duration_secs': 0.01316} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.325196] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.325422] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 639.325658] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.325806] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.325983] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 639.326293] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21594f02-2415-4bf2-9f76-78869ccb5ab2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.336155] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 639.336347] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Folder [datastore1] devstack-image-cache_base created. 
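Note: the lock names in the image-cache section are the datastore paths of the cached image ("[datastore1] devstack-image-cache_base/<image-id>" and the ".vmdk" inside it), so concurrent builds from the same Glance image serialize on one lock while builds from different images proceed in parallel; the cache directory itself is created on first use (FileManager.MakeDirectory, "Folder ... created"). An illustrative stand-in for that per-image locking, using plain threading locks instead of oslo.concurrency's named locks:

    import threading
    from collections import defaultdict

    _image_locks = defaultdict(threading.Lock)   # keyed by cache path

    def cache_vmdk_path(datastore, image_id):
        # Same shape as the lock names in the log.
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)

    def with_image_cache_lock(datastore, image_id, fn):
        # Serialize cache population per image, as the named locks above do.
        with _image_locks[cache_vmdk_path(datastore, image_id)]:
            return fn()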
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 639.337481] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11610651-cd55-4ae9-9ded-787381b3c308 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.343767] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 639.343767] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce6e10-d174-ed2a-f0ad-792b6aa2d7b8" [ 639.343767] env[68674]: _type = "Task" [ 639.343767] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.353179] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce6e10-d174-ed2a-f0ad-792b6aa2d7b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.524022] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Successfully updated port: 04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 639.568572] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0f5b293-6ed2-4b49-8bf3-650906ddde68 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e84db5bd-b6ec-42ef-9c34-a4160c44d973" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.630s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.591080] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.739598] env[68674]: DEBUG nova.network.neutron [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Updated VIF entry in instance network info cache for port 1602cb92-5f66-425c-a152-a0fdd777da11. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 639.740516] env[68674]: DEBUG nova.network.neutron [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Updating instance_info_cache with network_info: [{"id": "1602cb92-5f66-425c-a152-a0fdd777da11", "address": "fa:16:3e:1e:eb:0d", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1602cb92-5f", "ovs_interfaceid": "1602cb92-5f66-425c-a152-a0fdd777da11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.858039] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce6e10-d174-ed2a-f0ad-792b6aa2d7b8, 'name': SearchDatastore_Task, 'duration_secs': 0.011557} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.863185] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88023dc4-47ff-4135-8044-5a01835ea9e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.869507] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 639.869507] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bb62c2-2f26-0b3a-0326-723239c01f10" [ 639.869507] env[68674]: _type = "Task" [ 639.869507] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.881011] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bb62c2-2f26-0b3a-0326-723239c01f10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.027114] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "refresh_cache-275cdfcc-06f0-4c29-b18b-55cde38480a3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.030223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "refresh_cache-275cdfcc-06f0-4c29-b18b-55cde38480a3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.030223] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 640.112163] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.171038] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb3cf8e-f958-4286-af42-8b5af40a90be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.180163] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09360867-f1f5-4c56-b7e5-64b3750ed2ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.212251] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d25948b-8ffe-4cb5-bfcc-b3aea7bd99e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.219949] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a1f2fc-ed6d-4706-8c83-7ad831a2ddf5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.236314] env[68674]: DEBUG nova.compute.provider_tree [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.246578] env[68674]: DEBUG oslo_concurrency.lockutils [req-2bf6c3f0-41db-4a04-bd37-b2e13747b662 req-a2b97c8b-44ea-4b4d-839a-a2a7b2669133 service nova] Releasing lock "refresh_cache-b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.380623] env[68674]: DEBUG oslo_vmware.api [None 
req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bb62c2-2f26-0b3a-0326-723239c01f10, 'name': SearchDatastore_Task, 'duration_secs': 0.01132} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.380991] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.381279] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14/b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 640.381477] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3c578db-32ed-4a61-8d29-cd190c31780a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.390911] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 640.390911] env[68674]: value = "task-3239603" [ 640.390911] env[68674]: _type = "Task" [ 640.390911] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.401523] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.582451] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.739594] env[68674]: DEBUG nova.scheduler.client.report [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 640.829128] env[68674]: DEBUG nova.network.neutron [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Updating instance_info_cache with network_info: [{"id": "04dca9ad-f56b-402c-b76b-3c4ecda2e500", "address": "fa:16:3e:03:2c:00", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04dca9ad-f5", "ovs_interfaceid": "04dca9ad-f56b-402c-b76b-3c4ecda2e500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.902639] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452346} completed successfully. 
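Note: the CopyVirtualDisk_Task above clones the cached sparse VMDK into a folder named after the instance UUID, which is why the source and destination in the log differ only in the directory part of the datastore path. A small sketch of that path construction (helper names are made up; the copy itself is performed by vCenter's VirtualDiskManager, not by this code):

    IMAGE_ID = "b84d9354-ef6b-46ca-9dae-6549fa89bbea"
    INSTANCE_UUID = "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14"

    def cache_disk_path(datastore, image_id):
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    # Reproduces the source/destination pair logged by copy_virtual_disk.
    src = cache_disk_path("datastore1", IMAGE_ID)
    dst = instance_disk_path("datastore1", INSTANCE_UUID)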
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.902900] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14/b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.903128] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.903383] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f97eb3f-0d15-4f1c-8281-42eaea0d65ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.911424] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 640.911424] env[68674]: value = "task-3239604" [ 640.911424] env[68674]: _type = "Task" [ 640.911424] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.920587] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239604, 'name': ExtendVirtualDisk_Task} progress is 0%. 
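Note: "Extending root virtual disk to 1048576" is the flavor's root_gb=1 expressed in KiB (1024 * 1024); the cached cirros image is only about 21 MB (size=21318656 in the image metadata above), so the copied disk is grown to the flavor's root size before it is attached. The arithmetic, spelled out:

    def root_gb_to_kib(root_gb):
        # 1 GiB = 1024 * 1024 KiB; root_gb=1 gives 1048576, as logged above.
        return root_gb * 1024 * 1024

    assert root_gb_to_kib(1) == 1048576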
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.977285] env[68674]: DEBUG nova.compute.manager [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Received event network-vif-plugged-04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 640.977633] env[68674]: DEBUG oslo_concurrency.lockutils [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] Acquiring lock "275cdfcc-06f0-4c29-b18b-55cde38480a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.977928] env[68674]: DEBUG oslo_concurrency.lockutils [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.978159] env[68674]: DEBUG oslo_concurrency.lockutils [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.978431] env[68674]: DEBUG nova.compute.manager [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] No waiting events found dispatching network-vif-plugged-04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 640.978592] env[68674]: WARNING nova.compute.manager [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Received unexpected event network-vif-plugged-04dca9ad-f56b-402c-b76b-3c4ecda2e500 for instance with vm_state building and task_state spawning. [ 640.978850] env[68674]: DEBUG nova.compute.manager [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Received event network-changed-04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 640.979050] env[68674]: DEBUG nova.compute.manager [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Refreshing instance network info cache due to event network-changed-04dca9ad-f56b-402c-b76b-3c4ecda2e500. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 640.979283] env[68674]: DEBUG oslo_concurrency.lockutils [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] Acquiring lock "refresh_cache-275cdfcc-06f0-4c29-b18b-55cde38480a3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.246320] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.217s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.249723] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.069s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.251473] env[68674]: INFO nova.compute.claims [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.279436] env[68674]: INFO nova.scheduler.client.report [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Deleted allocations for instance 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d [ 641.332565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "refresh_cache-275cdfcc-06f0-4c29-b18b-55cde38480a3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.333185] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Instance network_info: |[{"id": "04dca9ad-f56b-402c-b76b-3c4ecda2e500", "address": "fa:16:3e:03:2c:00", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04dca9ad-f5", "ovs_interfaceid": "04dca9ad-f56b-402c-b76b-3c4ecda2e500", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 641.334047] env[68674]: DEBUG oslo_concurrency.lockutils [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] Acquired lock "refresh_cache-275cdfcc-06f0-4c29-b18b-55cde38480a3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.334047] env[68674]: DEBUG nova.network.neutron [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Refreshing network info cache for port 04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.335315] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:2c:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04dca9ad-f56b-402c-b76b-3c4ecda2e500', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.348049] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 641.349156] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 641.349335] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc64c0e8-9492-438e-94c0-abcf74793a41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.373388] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.373388] env[68674]: value = "task-3239605" [ 641.373388] env[68674]: _type = "Task" [ 641.373388] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.383409] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239605, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.421648] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073747} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.421954] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.422777] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9468e42-a56d-45cc-9654-db0a142fff1b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.446246] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14/b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.446562] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-585e3090-b3b1-4c68-8d18-01e8a3783aa8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.468828] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 641.468828] env[68674]: value = "task-3239606" [ 641.468828] env[68674]: _type = "Task" [ 641.468828] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.480872] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239606, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.791066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-724e647c-44ff-4ddf-b388-72e56faac224 tempest-ServerAddressesTestJSON-1371612918 tempest-ServerAddressesTestJSON-1371612918-project-member] Lock "9e337960-78c1-4ddb-a6f6-d6fd57dbf86d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.425s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.889710] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239605, 'name': CreateVM_Task, 'duration_secs': 0.401015} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.889903] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.890626] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.890804] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.891138] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.891426] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4552640-1f69-416c-8c83-6a81c8013474 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.897530] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 641.897530] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52439d57-7d65-3b70-521b-1678a7d08d09" [ 641.897530] env[68674]: _type = "Task" [ 641.897530] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.907522] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52439d57-7d65-3b70-521b-1678a7d08d09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.979069] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239606, 'name': ReconfigVM_Task, 'duration_secs': 0.303707} completed successfully. 
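Note: attaching the copied disk is done by reconfiguring the VM (VirtualMachine.ReconfigVM_Task) with a device-change spec rather than by a separate attach call, which is why the reconfigure immediately follows the copy and extend steps. The sketch below only illustrates the shape of such a spec with plain dicts; the driver actually builds SOAP objects through oslo.vmware, so the field names here are indicative, not authoritative:

    def disk_attach_spec(vmdk_path, controller_key, unit_number):
        backing = {
            "fileName": vmdk_path,       # e.g. "[datastore1] <uuid>/<uuid>.vmdk"
            "diskMode": "persistent",
        }
        disk = {
            "backing": backing,
            "controllerKey": controller_key,
            "unitNumber": unit_number,
        }
        # One "add" operation per new device, applied by ReconfigVM_Task.
        return {"deviceChange": [{"operation": "add", "device": disk}]}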
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.979351] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Reconfigured VM instance instance-0000001a to attach disk [datastore1] b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14/b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.979966] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8b4e6be-af69-4de4-9996-9d21227bf36a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.988670] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 641.988670] env[68674]: value = "task-3239607" [ 641.988670] env[68674]: _type = "Task" [ 641.988670] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.997667] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239607, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.408920] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52439d57-7d65-3b70-521b-1678a7d08d09, 'name': SearchDatastore_Task, 'duration_secs': 0.088604} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.411019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.411265] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.411501] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.411650] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.411827] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.412292] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e52be650-57f3-44bd-b479-42d51ea7dabc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.425198] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.425395] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.428224] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2876add0-2534-417c-bbf7-e625e66021c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.433787] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 642.433787] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522f2b61-1455-07f0-441f-4543601e87ea" [ 642.433787] env[68674]: _type = "Task" [ 642.433787] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.441766] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522f2b61-1455-07f0-441f-4543601e87ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.498121] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239607, 'name': Rename_Task, 'duration_secs': 0.152149} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.498401] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.498642] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8638c627-e86b-46b0-9401-4219840e1e41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.506064] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 642.506064] env[68674]: value = "task-3239608" [ 642.506064] env[68674]: _type = "Task" [ 642.506064] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.514014] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.528086] env[68674]: DEBUG nova.network.neutron [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Updated VIF entry in instance network info cache for port 04dca9ad-f56b-402c-b76b-3c4ecda2e500. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 642.528466] env[68674]: DEBUG nova.network.neutron [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Updating instance_info_cache with network_info: [{"id": "04dca9ad-f56b-402c-b76b-3c4ecda2e500", "address": "fa:16:3e:03:2c:00", "network": {"id": "7929e6a8-f567-4383-88fc-a42599630731", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-442120451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2974aa035d5d4591827e7713a403116d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04dca9ad-f5", "ovs_interfaceid": "04dca9ad-f56b-402c-b76b-3c4ecda2e500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.862040] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbbd0f3-ca58-4838-958b-a513e894f1d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.869835] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cce301-a2ef-4058-b96a-168de09afca6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.903588] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55b3243-d76c-4c94-b059-6fffb7be77f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.912670] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3df5064-465a-4694-bedf-86d472987931 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.928670] env[68674]: DEBUG nova.compute.provider_tree [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.945881] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522f2b61-1455-07f0-441f-4543601e87ea, 'name': SearchDatastore_Task, 'duration_secs': 0.044409} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.948315] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a61a809d-32f3-427f-8e79-6a3524dd3e23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.956037] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 642.956037] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521516fe-f1de-6c86-5bf7-c59ae27b2eef" [ 642.956037] env[68674]: _type = "Task" [ 642.956037] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.965878] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521516fe-f1de-6c86-5bf7-c59ae27b2eef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.016637] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239608, 'name': PowerOnVM_Task, 'duration_secs': 0.472649} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.016905] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.017502] env[68674]: INFO nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Took 7.18 seconds to spawn the instance on the hypervisor. 
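The entries above show the driver's usual pattern for asynchronous vCenter operations: invoke a *_Task method (Rename_Task, PowerOnVM_Task), get back a task reference, then poll it until it reaches a terminal state (the "_poll_task ... progress is N%" lines). Below is a minimal sketch of that invoke-then-poll pattern, assuming an oslo.vmware-style session object that exposes .vim, invoke_api() and wait_for_task() as the stack paths in the log indicate; power_on_vm() itself is only an illustrative helper, not Nova's own code.

```python
# Hedged sketch of the invoke-then-poll pattern in the log above: call a
# vCenter *_Task method through the session, then block until the task
# reaches a terminal state. `session` is assumed to be an oslo.vmware
# VMwareAPISession (it exposes .vim, invoke_api() and wait_for_task(), per
# the stack paths in the log); power_on_vm() is an illustrative helper.

def power_on_vm(session, vm_ref):
    """Power on a VM and wait for its PowerOnVM_Task to finish."""
    # invoke_api(module, method, *args) issues the SOAP call and returns a
    # reference to the asynchronous task (e.g. "task-3239608" above).
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task -- the "_poll_task ... progress is N%"
    # lines -- and raises if the task ends in an error state.
    return session.wait_for_task(task_ref)
```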
[ 643.017734] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.018562] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6eb72f-3c3e-46cb-8d38-a8f4067c0d01 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.032255] env[68674]: DEBUG oslo_concurrency.lockutils [req-a8464132-597a-4848-9bc6-10a6a827db40 req-2dd26529-2c86-4433-b20d-48a236e9346c service nova] Releasing lock "refresh_cache-275cdfcc-06f0-4c29-b18b-55cde38480a3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.432156] env[68674]: DEBUG nova.scheduler.client.report [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 643.470482] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521516fe-f1de-6c86-5bf7-c59ae27b2eef, 'name': SearchDatastore_Task, 'duration_secs': 0.02762} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.470759] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.471137] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 275cdfcc-06f0-4c29-b18b-55cde38480a3/275cdfcc-06f0-4c29-b18b-55cde38480a3.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 643.471311] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e58777f7-92af-4105-ac31-e189529aed5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.481344] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 643.481344] env[68674]: value = "task-3239609" [ 643.481344] env[68674]: _type = "Task" [ 643.481344] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.490631] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.540583] env[68674]: INFO nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Took 37.82 seconds to build instance. [ 643.940080] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.941139] env[68674]: DEBUG nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 643.946066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.749s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.949019] env[68674]: INFO nova.compute.claims [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.999889] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239609, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.043352] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.310s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.454556] env[68674]: DEBUG nova.compute.utils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 644.462025] env[68674]: DEBUG nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 644.498021] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543126} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.498021] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 275cdfcc-06f0-4c29-b18b-55cde38480a3/275cdfcc-06f0-4c29-b18b-55cde38480a3.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 644.498021] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.498021] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a079da12-b038-45ab-ad1b-c27f681fea07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.505929] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 644.505929] env[68674]: value = "task-3239610" [ 644.505929] env[68674]: _type = "Task" [ 644.505929] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.517308] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239610, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.550037] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.962118] env[68674]: DEBUG nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 645.020410] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.303419} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.023295] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.026141] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6365df15-449f-46c6-ab35-22f03a6fa0ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.052496] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 275cdfcc-06f0-4c29-b18b-55cde38480a3/275cdfcc-06f0-4c29-b18b-55cde38480a3.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.056858] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c54d1d5-4cde-40af-ac44-371a183d2223 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.085103] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 645.085103] env[68674]: value = "task-3239611" [ 645.085103] env[68674]: _type = "Task" [ 645.085103] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.094988] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239611, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.100850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.596646] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239611, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.713023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b87f7fe-7de3-4f2b-9104-62e53b32f716 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.719688] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9127b8f-6fc4-4385-814d-86ca6f18ba91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.754446] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d963132a-0885-42b6-81f1-7b864c8de1ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.763458] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d448f0c-406f-4535-aa43-853ee8110835 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.780075] env[68674]: DEBUG nova.compute.provider_tree [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.978346] env[68674]: DEBUG nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 646.011264] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 646.011842] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.012078] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 646.012287] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.012464] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 646.012582] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 646.013410] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 646.013410] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 646.013410] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 
tempest-ServerShowV257Test-435696042-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 646.013410] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 646.013410] env[68674]: DEBUG nova.virt.hardware [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 646.014316] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796d1c0b-9e1e-4b18-a26b-ced13f51811a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.025902] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4603b28b-be1e-45b5-ac27-e3390ea43d40 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.039910] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 646.045731] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Creating folder: Project (470e6f8e6e0541a29cbd69f36868ddf6). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 646.046222] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-152364a9-2179-4b9d-8c20-615a38bf415d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.060786] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Created folder: Project (470e6f8e6e0541a29cbd69f36868ddf6) in parent group-v647377. [ 646.060786] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Creating folder: Instances. Parent ref: group-v647463. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 646.061034] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f1b833e-5059-4ff4-9013-ef147e77d527 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.076735] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Created folder: Instances in parent group-v647463. 
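Just above, the driver creates a per-project folder and then an Instances folder before building the VM, and treats "already exists" as success (compare the earlier _create_folder_if_missing entry). The following is a hedged sketch of that create-if-missing pattern, again assuming an oslo.vmware-style session; the DuplicateName fault class and the helper name are assumptions for illustration, not a claim about Nova's exact implementation.

```python
# Hedged sketch of the create-if-missing folder pattern suggested by the
# "Creating folder ... / Created folder ..." entries above. `session` is
# again assumed to be an oslo.vmware-style session; DuplicateName is assumed
# to be the oslo.vmware fault raised when the folder already exists. The
# helper name is illustrative, not Nova's vm_util.create_folder.

from oslo_vmware import exceptions as vexc


def create_folder_if_missing(session, parent_folder_ref, name):
    """Create a child folder under parent_folder_ref, tolerating duplicates."""
    try:
        # Folder.CreateFolder returns the managed object reference of the new
        # folder (the group-v647463-style IDs seen in the log).
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)
    except vexc.DuplicateName:
        # Another build (or an earlier run) already created it; treat the
        # existing folder as success, mirroring the idempotent behaviour the
        # log relies on.
        return None
```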
[ 646.077033] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 646.077236] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 646.077466] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6f272cf-9222-4d13-a033-092d5dda9e15 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.104496] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239611, 'name': ReconfigVM_Task, 'duration_secs': 0.624949} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.106149] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 275cdfcc-06f0-4c29-b18b-55cde38480a3/275cdfcc-06f0-4c29-b18b-55cde38480a3.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.106786] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 646.106786] env[68674]: value = "task-3239614" [ 646.106786] env[68674]: _type = "Task" [ 646.106786] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.106979] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9439e12d-6b14-43c9-8165-c1e18b3fd180 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.121319] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239614, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.122873] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 646.122873] env[68674]: value = "task-3239615" [ 646.122873] env[68674]: _type = "Task" [ 646.122873] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.131532] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239615, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.283972] env[68674]: DEBUG nova.scheduler.client.report [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.541170] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "5e3f667c-5d3a-4465-9186-779563087480" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.541577] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "5e3f667c-5d3a-4465-9186-779563087480" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.620177] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239614, 'name': CreateVM_Task, 'duration_secs': 0.398067} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.620470] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 646.620792] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.620946] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.621288] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 646.621552] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18b34f05-d642-400e-9a36-3d80f5f3a492 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.631084] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 646.631084] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a89c6-c6ca-a1b4-f6ac-eea82322f125" [ 646.631084] env[68674]: _type = "Task" [ 646.631084] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.634660] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239615, 'name': Rename_Task, 'duration_secs': 0.162391} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.638103] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 646.638355] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3b84cc9-3927-4a44-ad44-2d015988afad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.645673] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a89c6-c6ca-a1b4-f6ac-eea82322f125, 'name': SearchDatastore_Task, 'duration_secs': 0.010614} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.647068] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.647308] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 646.647539] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.647891] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.648091] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 646.648435] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 646.648435] env[68674]: value = "task-3239616" [ 646.648435] env[68674]: _type = "Task" [ 646.648435] env[68674]: } to 
complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.648799] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4bd934d-4a91-437c-abec-2ebd41516086 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.660895] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.664694] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 646.664877] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 646.665645] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aadd75b-d6db-44d0-97bb-a1e77de0dfd8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.672552] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 646.672552] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5217d5fc-f034-6114-639d-f696533f9703" [ 646.672552] env[68674]: _type = "Task" [ 646.672552] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.681972] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5217d5fc-f034-6114-639d-f696533f9703, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.789343] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.844s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.789861] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 646.792532] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.622s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.792746] env[68674]: DEBUG nova.objects.instance [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lazy-loading 'resources' on Instance uuid f500b495-7bfb-40ff-8a10-e46ca6744902 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 647.165202] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239616, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.190117] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5217d5fc-f034-6114-639d-f696533f9703, 'name': SearchDatastore_Task, 'duration_secs': 0.009833} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.191038] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23140344-7891-49dc-bf29-455e368dadd6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.197057] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 647.197057] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b63072-d619-04dd-3b01-56343c359987" [ 647.197057] env[68674]: _type = "Task" [ 647.197057] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.206579] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b63072-d619-04dd-3b01-56343c359987, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.296588] env[68674]: DEBUG nova.compute.utils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 647.306028] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 647.306028] env[68674]: DEBUG nova.network.neutron [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 647.482240] env[68674]: DEBUG nova.policy [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93f0bc7e9a304db3a3ca7be2108ca297', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '84c5a3afe0a94c5798a58ac80091cecb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 647.671700] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239616, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.713337] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b63072-d619-04dd-3b01-56343c359987, 'name': SearchDatastore_Task, 'duration_secs': 0.010346} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.714279] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.714591] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 647.714873] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-241e4a7f-da69-4dbb-8c87-0d6755199cc8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.725517] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 647.725517] env[68674]: value = "task-3239617" [ 647.725517] env[68674]: _type = "Task" [ 647.725517] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.740521] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.808535] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 647.966498] env[68674]: DEBUG nova.network.neutron [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Successfully created port: c90fb527-622f-4719-996e-476716d097db {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.064407] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefae80c-6dca-4764-9bd5-2ff032a0c318 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.075248] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6026d492-6d22-4d2c-9c7c-71956ea1d29b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.125696] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2545ad2f-ad40-4f1a-a43d-cc0c7e08398d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.134969] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa863d2-1cde-4dbf-846f-94e7b8f50e59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.151569] env[68674]: DEBUG nova.compute.provider_tree [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.163488] env[68674]: DEBUG oslo_vmware.api [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239616, 'name': PowerOnVM_Task, 'duration_secs': 1.16423} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.164521] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.164745] env[68674]: INFO nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Took 9.42 seconds to spawn the instance on the hypervisor. 
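The spawn sequences above guard the cached image VMDK with an oslo.concurrency lock (the Acquiring/Releasing lock "[datastore1] devstack-image-cache_base/..." pairs) while it is copied into the instance directory. Below is a minimal sketch of that per-image locking pattern; lockutils.lock() is the real oslo.concurrency context manager, while the cache-folder layout and the copy callable are stand-ins for the CopyVirtualDisk_Task step shown in the log.

```python
# Hedged sketch of the per-image serialization visible in the log: the cached
# VMDK path doubles as a lock name, so concurrent builds that reuse the same
# Glance image do not race while one of them copies it out of the cache.
# lockutils.lock() is the real oslo.concurrency context manager; the cache
# folder layout and copy_cached_image callable are stand-ins for the
# CopyVirtualDisk_Task step shown above.

from oslo_concurrency import lockutils


def clone_from_image_cache(datastore, cache_folder, image_id,
                           copy_cached_image):
    """Copy a cached image disk while holding a lock named after its path."""
    lock_name = '[%s] %s/%s' % (datastore, cache_folder, image_id)
    # The "Acquiring lock ... / Releasing lock ..." pairs in the log
    # correspond to entering and leaving this context manager.
    with lockutils.lock(lock_name):
        return copy_cached_image()
```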
[ 648.164935] env[68674]: DEBUG nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.165785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6309facc-b1a9-4293-8add-54a0c43df1dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.241939] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239617, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.654688] env[68674]: DEBUG nova.scheduler.client.report [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.687773] env[68674]: INFO nova.compute.manager [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Took 40.82 seconds to build instance. [ 648.737861] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552399} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.738116] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 648.738340] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 648.738585] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e747c3e-1e67-49cd-9b09-3c2050ae8a9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.747637] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 648.747637] env[68674]: value = "task-3239618" [ 648.747637] env[68674]: _type = "Task" [ 648.747637] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.761315] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239618, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.818335] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 648.851550] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 648.851824] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.851980] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 648.852209] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.852337] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 648.852483] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 648.852711] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 648.852892] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 
tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 648.853111] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 648.853238] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 648.853410] env[68674]: DEBUG nova.virt.hardware [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 648.854329] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5f5f52-a14d-4281-a47c-e18f4f585d3c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.863857] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb7de5b-b454-4440-9f17-d8a978069770 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.107587] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.107587] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.163260] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.371s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.165555] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 
32.850s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.167136] env[68674]: INFO nova.compute.claims [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.183210] env[68674]: INFO nova.scheduler.client.report [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Deleted allocations for instance f500b495-7bfb-40ff-8a10-e46ca6744902 [ 649.190687] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00dd5cab-fe58-4f49-871b-8f90c20c61ba tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.423s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.260777] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073768} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.260954] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 649.262067] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac92beac-ea2c-4f2a-9f05-c130407b2f9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.286450] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 649.286450] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c0d7c8c-2b0f-4a2b-961e-348847c7f58a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.315107] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 649.315107] env[68674]: value = "task-3239619" [ 649.315107] env[68674]: _type = "Task" [ 649.315107] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.326287] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239619, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.546467] env[68674]: DEBUG nova.compute.manager [req-469094b7-70d4-4416-bb88-394e7d8e790a req-0b35d885-4cab-4ab0-8ea0-9e8734e4126a service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Received event network-vif-plugged-c90fb527-622f-4719-996e-476716d097db {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.546467] env[68674]: DEBUG oslo_concurrency.lockutils [req-469094b7-70d4-4416-bb88-394e7d8e790a req-0b35d885-4cab-4ab0-8ea0-9e8734e4126a service nova] Acquiring lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.546677] env[68674]: DEBUG oslo_concurrency.lockutils [req-469094b7-70d4-4416-bb88-394e7d8e790a req-0b35d885-4cab-4ab0-8ea0-9e8734e4126a service nova] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.546848] env[68674]: DEBUG oslo_concurrency.lockutils [req-469094b7-70d4-4416-bb88-394e7d8e790a req-0b35d885-4cab-4ab0-8ea0-9e8734e4126a service nova] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.547027] env[68674]: DEBUG nova.compute.manager [req-469094b7-70d4-4416-bb88-394e7d8e790a req-0b35d885-4cab-4ab0-8ea0-9e8734e4126a service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] No waiting events found dispatching network-vif-plugged-c90fb527-622f-4719-996e-476716d097db {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 649.547299] env[68674]: WARNING nova.compute.manager [req-469094b7-70d4-4416-bb88-394e7d8e790a req-0b35d885-4cab-4ab0-8ea0-9e8734e4126a service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Received unexpected event network-vif-plugged-c90fb527-622f-4719-996e-476716d097db for instance with vm_state building and task_state spawning. 
[ 649.643876] env[68674]: DEBUG nova.network.neutron [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Successfully updated port: c90fb527-622f-4719-996e-476716d097db {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 649.692668] env[68674]: DEBUG oslo_concurrency.lockutils [None req-265f2fc5-e9ec-42e0-8eb5-ad18b4a987df tempest-ImagesNegativeTestJSON-226513179 tempest-ImagesNegativeTestJSON-226513179-project-member] Lock "f500b495-7bfb-40ff-8a10-e46ca6744902" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.023s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.693762] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 649.740588] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "a62237a7-a123-4378-b655-d489ef08474b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.740588] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "a62237a7-a123-4378-b655-d489ef08474b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.740944] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "a62237a7-a123-4378-b655-d489ef08474b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.741340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "a62237a7-a123-4378-b655-d489ef08474b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.741340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "a62237a7-a123-4378-b655-d489ef08474b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.744160] env[68674]: INFO nova.compute.manager [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Terminating instance [ 649.828361] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239619, 'name': ReconfigVM_Task, 'duration_secs': 0.501764} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.828361] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Reconfigured VM instance instance-0000001c to attach disk [datastore1] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.828769] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47d20654-d5e8-4e56-bfe6-b32dce0d28be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.837257] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 649.837257] env[68674]: value = "task-3239620" [ 649.837257] env[68674]: _type = "Task" [ 649.837257] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.847685] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239620, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.147669] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "refresh_cache-0097c367-bb3a-4b7b-9fcc-b3e3482689e2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.147848] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquired lock "refresh_cache-0097c367-bb3a-4b7b-9fcc-b3e3482689e2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.148009] env[68674]: DEBUG nova.network.neutron [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.212682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.248120] env[68674]: DEBUG nova.compute.manager [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.248436] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.249665] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3019ba6-0f0e-4b3c-bad4-f1de3456f0fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.262803] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.263089] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7547d1a7-347c-45ae-9a88-f7d890183ef9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.270100] env[68674]: DEBUG oslo_vmware.api [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 650.270100] env[68674]: value = "task-3239621" [ 650.270100] env[68674]: _type = "Task" [ 650.270100] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.283132] env[68674]: DEBUG oslo_vmware.api [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.348493] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239620, 'name': Rename_Task, 'duration_secs': 0.183921} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.351286] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 650.351736] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b1220d8-f31a-4e84-963a-5fe3bcaf76f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.360813] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 650.360813] env[68674]: value = "task-3239622" [ 650.360813] env[68674]: _type = "Task" [ 650.360813] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.373680] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.706475] env[68674]: DEBUG nova.network.neutron [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.788227] env[68674]: DEBUG oslo_vmware.api [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239621, 'name': PowerOffVM_Task, 'duration_secs': 0.242408} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.788346] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 650.788453] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 650.788865] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8434d4d2-4924-41ae-a9d7-429b804fb937 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.836269] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f6b2fa-04d9-4887-8c55-caa3f575711b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.845767] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a954690c-f5cb-4108-afb6-2e4d9a0dbdc2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.886719] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ab01f6-8431-4bc5-b7ba-076674dd47bb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.889243] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 650.889516] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d68cd372-617a-438a-bad8-72dff6afbef9 
tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 650.889727] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleting the datastore file [datastore2] a62237a7-a123-4378-b655-d489ef08474b {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 650.890243] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69812b9d-7235-4f86-b060-093feb110e54 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.900527] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239622, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.903009] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7511f40-c5aa-4d05-85a6-e81a570fb293 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.907300] env[68674]: DEBUG oslo_vmware.api [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 650.907300] env[68674]: value = "task-3239624" [ 650.907300] env[68674]: _type = "Task" [ 650.907300] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.919330] env[68674]: DEBUG nova.compute.provider_tree [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 650.937406] env[68674]: DEBUG oslo_vmware.api [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239624, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.982477] env[68674]: DEBUG nova.network.neutron [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Updating instance_info_cache with network_info: [{"id": "c90fb527-622f-4719-996e-476716d097db", "address": "fa:16:3e:94:9d:d6", "network": {"id": "76f04be0-f160-4bca-9fc0-db40ced80221", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1246413254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "84c5a3afe0a94c5798a58ac80091cecb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc90fb527-62", "ovs_interfaceid": "c90fb527-622f-4719-996e-476716d097db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.392629] env[68674]: DEBUG oslo_vmware.api [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239622, 'name': PowerOnVM_Task, 'duration_secs': 0.625852} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.392909] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.393134] env[68674]: INFO nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Took 5.41 seconds to spawn the instance on the hypervisor. 
[ 651.393317] env[68674]: DEBUG nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 651.394087] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b09872-6394-4127-9519-e5bf2090bf87 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.420591] env[68674]: DEBUG oslo_vmware.api [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14753} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.421086] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.421447] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.421796] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.422133] env[68674]: INFO nova.compute.manager [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: a62237a7-a123-4378-b655-d489ef08474b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 651.422612] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.424595] env[68674]: DEBUG nova.compute.manager [-] [instance: a62237a7-a123-4378-b655-d489ef08474b] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.424832] env[68674]: DEBUG nova.network.neutron [-] [instance: a62237a7-a123-4378-b655-d489ef08474b] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.449287] env[68674]: ERROR nova.scheduler.client.report [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [req-5b604943-8991-474a-a743-95a826e23150] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5b604943-8991-474a-a743-95a826e23150"}]} [ 651.469728] env[68674]: DEBUG nova.scheduler.client.report [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 651.483171] env[68674]: DEBUG nova.scheduler.client.report [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 651.484032] env[68674]: DEBUG nova.compute.provider_tree [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 651.486383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 
tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Releasing lock "refresh_cache-0097c367-bb3a-4b7b-9fcc-b3e3482689e2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.486812] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Instance network_info: |[{"id": "c90fb527-622f-4719-996e-476716d097db", "address": "fa:16:3e:94:9d:d6", "network": {"id": "76f04be0-f160-4bca-9fc0-db40ced80221", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1246413254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "84c5a3afe0a94c5798a58ac80091cecb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc90fb527-62", "ovs_interfaceid": "c90fb527-622f-4719-996e-476716d097db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 651.490013] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:9d:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da0e5087-d65b-416f-90fe-beaa9c534ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c90fb527-622f-4719-996e-476716d097db', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 651.496207] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Creating folder: Project (84c5a3afe0a94c5798a58ac80091cecb). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.496633] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cefbe7ac-aa06-4346-afc1-3815e80c3d81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.505017] env[68674]: DEBUG nova.scheduler.client.report [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 651.511816] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Created folder: Project (84c5a3afe0a94c5798a58ac80091cecb) in parent group-v647377. [ 651.511816] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Creating folder: Instances. Parent ref: group-v647466. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.511816] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf43b0a7-deaa-4a64-8a03-98a47e3ea199 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.523356] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Created folder: Instances in parent group-v647466. [ 651.523356] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.523356] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 651.523356] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfa39f73-bc9e-4b97-ad76-10865325e3d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.542332] env[68674]: DEBUG nova.scheduler.client.report [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 651.552705] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 651.552705] env[68674]: value = "task-3239627" [ 651.552705] env[68674]: _type = "Task" [ 651.552705] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.565650] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239627, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.575311] env[68674]: DEBUG nova.compute.manager [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Received event network-changed-c90fb527-622f-4719-996e-476716d097db {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 651.575580] env[68674]: DEBUG nova.compute.manager [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Refreshing instance network info cache due to event network-changed-c90fb527-622f-4719-996e-476716d097db. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 651.575893] env[68674]: DEBUG oslo_concurrency.lockutils [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] Acquiring lock "refresh_cache-0097c367-bb3a-4b7b-9fcc-b3e3482689e2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.576105] env[68674]: DEBUG oslo_concurrency.lockutils [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] Acquired lock "refresh_cache-0097c367-bb3a-4b7b-9fcc-b3e3482689e2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.576326] env[68674]: DEBUG nova.network.neutron [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Refreshing network info cache for port c90fb527-622f-4719-996e-476716d097db {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.916358] env[68674]: INFO nova.compute.manager [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Took 40.80 seconds to build instance. [ 652.067395] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239627, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.246304] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4978bc93-421a-49d8-91e2-1b79dbd46d0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.258622] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb895b5-f1d6-459c-8a17-ecf4be555390 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.261863] env[68674]: DEBUG nova.network.neutron [-] [instance: a62237a7-a123-4378-b655-d489ef08474b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.295599] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919f85cd-d792-4044-b6bc-7236f743fcf9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.304752] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f9b915-dddf-4907-b1da-f259a105db63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.320388] env[68674]: DEBUG nova.compute.provider_tree [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.383972] env[68674]: DEBUG nova.network.neutron [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Updated VIF entry in instance 
network info cache for port c90fb527-622f-4719-996e-476716d097db. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 652.384366] env[68674]: DEBUG nova.network.neutron [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Updating instance_info_cache with network_info: [{"id": "c90fb527-622f-4719-996e-476716d097db", "address": "fa:16:3e:94:9d:d6", "network": {"id": "76f04be0-f160-4bca-9fc0-db40ced80221", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1246413254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "84c5a3afe0a94c5798a58ac80091cecb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc90fb527-62", "ovs_interfaceid": "c90fb527-622f-4719-996e-476716d097db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.418744] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bbd941d8-83c0-4660-a590-7a885d7ecde7 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "f45200cd-6cb0-498a-8858-1e70177031d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.674s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.442180] env[68674]: INFO nova.compute.manager [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Rebuilding instance [ 652.487936] env[68674]: DEBUG nova.compute.manager [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 652.488880] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a34f27-00fa-430c-9e95-199969459779 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.568686] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239627, 'name': CreateVM_Task, 'duration_secs': 0.671218} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.568938] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 652.569658] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.569873] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.570261] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 652.570572] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88550a6f-632e-4210-9637-60aca7e5c5ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.575608] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 652.575608] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52983710-ab53-e172-01c2-f03e47ff644d" [ 652.575608] env[68674]: _type = "Task" [ 652.575608] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.583712] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52983710-ab53-e172-01c2-f03e47ff644d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.764611] env[68674]: INFO nova.compute.manager [-] [instance: a62237a7-a123-4378-b655-d489ef08474b] Took 1.34 seconds to deallocate network for instance. 
[ 652.825490] env[68674]: DEBUG nova.scheduler.client.report [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.886759] env[68674]: DEBUG oslo_concurrency.lockutils [req-6724b0a3-0085-45d6-b80d-4b21e5782e66 req-33b000be-686a-4cd7-a158-a068a6ce04fc service nova] Releasing lock "refresh_cache-0097c367-bb3a-4b7b-9fcc-b3e3482689e2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.921109] env[68674]: DEBUG nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.086960] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52983710-ab53-e172-01c2-f03e47ff644d, 'name': SearchDatastore_Task, 'duration_secs': 0.012138} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.087294] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.087541] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.087809] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.087960] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.088156] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.088468] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a87e23c-d82a-4f3d-b998-bbb42b50715b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.098715] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.098896] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.099759] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c20a67e-a87f-472b-b6e2-9a76eeaa293b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.105523] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 653.105523] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523efe2d-cda2-09a1-f829-a694025387ad" [ 653.105523] env[68674]: _type = "Task" [ 653.105523] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.113916] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523efe2d-cda2-09a1-f829-a694025387ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.271434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.330629] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.165s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.331174] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Start building networks asynchronously for instance. 
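The oslo_concurrency.lockutils entries above record, for each named lock such as "compute_resources", which caller acquired it, how long that caller waited, and how long the lock was held (e.g. "held 4.165s"). As a rough illustration of that wait/hold bookkeeping only, and not the oslo.concurrency implementation itself, a minimal standard-library sketch with hypothetical names:

```python
# Illustrative sketch: mimics the 'acquired :: waited N s' / '"released" :: held N s'
# bookkeeping visible in the lockutils log lines. Not the oslo.concurrency code.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


def _get_lock(name: str) -> threading.Lock:
    # One shared lock object per resource name, e.g. "compute_resources".
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name: str, caller: str):
    lock = _get_lock(name)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


# Usage, mirroring the resource-tracker pattern in the log:
# with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
#     ...claim CPU/RAM/disk for the instance...
```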
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.334354] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.140s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.335423] env[68674]: INFO nova.compute.claims [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 653.444202] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.506041] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 653.506224] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ceee831-cdaa-4daf-976e-e6b640c86963 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.516536] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 653.516536] env[68674]: value = "task-3239628" [ 653.516536] env[68674]: _type = "Task" [ 653.516536] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.526492] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.617394] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523efe2d-cda2-09a1-f829-a694025387ad, 'name': SearchDatastore_Task, 'duration_secs': 0.010055} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.618361] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a201ec00-70ba-4bda-8394-3712321d4b68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.624991] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 653.624991] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c4e68d-3d8a-b814-c83c-4082ba5c9cb7" [ 653.624991] env[68674]: _type = "Task" [ 653.624991] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.632662] env[68674]: DEBUG nova.compute.manager [req-b3a4c530-0408-4349-bc4e-50d8e5c09da9 req-0068ffcd-472b-41e0-a2f7-e433374b5644 service nova] [instance: a62237a7-a123-4378-b655-d489ef08474b] Received event network-vif-deleted-3be77e6c-a481-49ae-b015-141b0a40fe08 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.639425] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c4e68d-3d8a-b814-c83c-4082ba5c9cb7, 'name': SearchDatastore_Task, 'duration_secs': 0.01007} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.639846] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.640223] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 0097c367-bb3a-4b7b-9fcc-b3e3482689e2/0097c367-bb3a-4b7b-9fcc-b3e3482689e2.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 653.640699] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-454265fc-a8f3-4ac1-86d4-4ce30419588a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.650033] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 653.650033] env[68674]: value = "task-3239629" [ 653.650033] env[68674]: _type = "Task" [ 653.650033] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.656988] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239629, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.839793] env[68674]: DEBUG nova.compute.utils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.841715] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.842097] env[68674]: DEBUG nova.network.neutron [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 654.026676] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239628, 'name': PowerOffVM_Task, 'duration_secs': 0.183553} completed successfully. 
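Nearly every oslo_vmware.api entry in this section follows the same cycle: a vSphere task (SearchDatastore_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ...) is submitted, the driver logs "Waiting for the task ... to complete", polls its progress, and finally reports the duration once it completes successfully. A generic poll loop in that spirit, with hypothetical helper names rather than the real oslo.vmware task utilities:

```python
# Illustrative sketch of the submit/poll/complete cycle seen in the oslo_vmware.api
# entries ("Waiting for the task" -> "progress is N%" -> "completed successfully",
# duration_secs=...). Hypothetical helpers; the real driver uses oslo.vmware.
import time


class TaskError(Exception):
    pass


def wait_for_task(get_task_info, poll_interval: float = 0.5) -> float:
    """Poll a task until it finishes and return how long it took.

    get_task_info is assumed to return a dict such as
    {'state': 'running', 'progress': 40} or {'state': 'success'}.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        state = info.get("state")
        if state == "success":
            return time.monotonic() - start   # reported as duration_secs
        if state == "error":
            raise TaskError(info.get("error", "task failed"))
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)


# Usage sketch: duration = wait_for_task(lambda: session.get_info(task_ref)),
# where `session` and `task_ref` stand in for the vCenter session and the task
# reference returned by calls such as CopyVirtualDisk_Task.
```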
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.026676] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 654.026676] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.027909] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ce4100-4a10-4209-bfe9-7d2cd6183d19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.039019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 654.039019] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8ac7ec7-cc6a-461a-a76e-9387bb571d28 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.064963] env[68674]: DEBUG nova.policy [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 654.070021] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 654.070021] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 654.070021] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Deleting the datastore file [datastore1] f45200cd-6cb0-498a-8858-1e70177031d9 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.070021] env[68674]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2912b305-72b4-446e-99e8-de31a859ae2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.075332] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 654.075332] env[68674]: value = "task-3239631" [ 654.075332] env[68674]: _type = "Task" [ 654.075332] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.083614] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.160513] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239629, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484791} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.160823] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 0097c367-bb3a-4b7b-9fcc-b3e3482689e2/0097c367-bb3a-4b7b-9fcc-b3e3482689e2.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 654.161202] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 654.161768] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6f9579c-8e85-40ca-bab8-f3c3873c9b78 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.169831] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 654.169831] env[68674]: value = "task-3239632" [ 654.169831] env[68674]: _type = "Task" [ 654.169831] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.178709] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239632, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.348822] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.466751] env[68674]: DEBUG nova.network.neutron [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Successfully created port: 55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.587866] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1353} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.590257] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.590481] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 654.590668] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.689730] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07973} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.692478] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 654.695041] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0c67f4-6e3b-4ea7-a0a0-99564435887d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.715704] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 0097c367-bb3a-4b7b-9fcc-b3e3482689e2/0097c367-bb3a-4b7b-9fcc-b3e3482689e2.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 654.719269] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a69b7c76-6875-40e6-87e8-09f77ef0c89b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.739978] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 654.739978] env[68674]: value = "task-3239633" [ 654.739978] env[68674]: _type = "Task" [ 654.739978] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.750996] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239633, 'name': ReconfigVM_Task} progress is 5%. 
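Taken together, the vmops/vm_util entries for instance 0097c367-bb3a-4b7b-9fcc-b3e3482689e2 trace how the root disk is prepared from the image cache: copy the cached VMDK into the instance directory, extend it to the flavor's root size, reconfigure the VM to attach it, then rename and power on. A schematic, runnable outline of that ordering; every helper below is a hypothetical stand-in for the corresponding *_Task call:

```python
# Schematic outline of the disk-preparation ordering the log shows; each _invoke
# call stands in for submitting the named vSphere task and waiting on it.
def _invoke(task_name: str, **kwargs) -> None:
    print(f"Invoking {task_name}: {kwargs}")   # stand-in for the real API call


def prepare_and_boot_root_disk(datastore: str, image_id: str,
                               instance_id: str, root_gb: int) -> None:
    cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    instance_vmdk = f"[{datastore}] {instance_id}/{instance_id}.vmdk"

    # 1. Copy the cached image VMDK into the instance directory.
    _invoke("CopyVirtualDisk_Task", source=cache_vmdk, dest=instance_vmdk)
    # 2. Extend the copy to the flavor's root size; the log expresses the
    #    target in KB (1 GiB -> 1048576).
    _invoke("ExtendVirtualDisk_Task", name=instance_vmdk,
            new_capacity_kb=root_gb * 1024 * 1024)
    # 3. Attach the prepared disk to the VM, then rename and power it on.
    _invoke("ReconfigVM_Task", vm=instance_id, disk=instance_vmdk, disk_type="sparse")
    _invoke("Rename_Task", vm=instance_id)
    _invoke("PowerOnVM_Task", vm=instance_id)


# Example: prepare_and_boot_root_disk("datastore2",
#     "b84d9354-ef6b-46ca-9dae-6549fa89bbea",
#     "0097c367-bb3a-4b7b-9fcc-b3e3482689e2", root_gb=1)
```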
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.001407] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "ae945f3f-fde8-4b25-a5bd-81014fc99690" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.001713] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.001924] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "ae945f3f-fde8-4b25-a5bd-81014fc99690-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.002118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.002288] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.006806] env[68674]: INFO nova.compute.manager [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Terminating instance [ 655.017304] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733257f7-cd71-4fa4-a5f4-79735c184dc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.028383] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98098e09-3683-480a-ab12-be1d53d3b868 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.065153] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2add45-d13a-42ff-ae3b-31e05727634b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.073526] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cc7835-cd02-4fca-8556-35ecbbf3fe41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.089629] env[68674]: DEBUG nova.compute.provider_tree [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.249499] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239633, 'name': ReconfigVM_Task, 'duration_secs': 0.349727} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.249813] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 0097c367-bb3a-4b7b-9fcc-b3e3482689e2/0097c367-bb3a-4b7b-9fcc-b3e3482689e2.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 655.250503] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75c035b8-6967-4e72-8e35-d20d4371f862 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.257226] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 655.257226] env[68674]: value = "task-3239634" [ 655.257226] env[68674]: _type = "Task" [ 655.257226] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.265922] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239634, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.365436] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.396803] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.397073] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.397236] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.397414] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.397563] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.397709] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.397917] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.398405] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 655.398663] env[68674]: DEBUG 
nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.398848] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.399383] env[68674]: DEBUG nova.virt.hardware [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.400273] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda6ddd9-5d1d-4b64-af0f-98d0f31c818c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.409444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eae0e69-dd18-4a52-875e-061dc0335534 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.513719] env[68674]: DEBUG nova.compute.manager [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 655.513974] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 655.514857] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cb86bb-7465-423f-ba8d-51dd708954e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.524282] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 655.524552] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b65058f8-c53c-42a5-8fe9-5c674857a62f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.533061] env[68674]: DEBUG oslo_vmware.api [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 655.533061] env[68674]: value = "task-3239635" [ 655.533061] env[68674]: _type = "Task" [ 655.533061] env[68674]: } to complete. 
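The nova.virt.hardware lines above record the CPU-topology selection for the m1.nano flavor: with one vCPU and effectively unlimited socket/core/thread limits, only a single (1,1,1) topology is possible. A simplified worked example of that enumeration (no preference ordering or NUMA handling, unlike nova's real logic):

```python
# Simplified enumeration matching the "Build topologies for 1 vcpu(s) 1:1:1" /
# "Got 1 possible topologies" lines: list every (sockets, cores, threads) split
# whose product equals the flavor's vcpus and that fits within the limits.
from typing import NamedTuple


class VirtCPUTopology(NamedTuple):
    cores: int
    sockets: int
    threads: int


def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> list[VirtCPUTopology]:
    topologies = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(cores=cores,
                                                      sockets=sockets,
                                                      threads=threads))
    return topologies


# For the m1.nano flavor in the log (vcpus=1, limits 65536:65536:65536) this
# yields exactly one entry, matching
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]":
print(possible_topologies(1))
```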
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.549482] env[68674]: DEBUG oslo_vmware.api [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239635, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.592993] env[68674]: DEBUG nova.scheduler.client.report [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.628710] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.628980] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.629911] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.629911] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.629911] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.629911] env[68674]: DEBUG nova.virt.hardware [None 
req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.629911] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.630216] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 655.630216] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.630319] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.630427] env[68674]: DEBUG nova.virt.hardware [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.631410] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86b91b6-96c7-42bf-b8a3-4e58cc3c6ad2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.649018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db97c613-d38c-4594-8ef9-a17bb9c9b2ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.665312] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.675583] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.676120] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.676510] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f672b423-ece4-4b22-b8ef-a181ed387722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.708168] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.708168] env[68674]: value = "task-3239636" [ 655.708168] env[68674]: _type = "Task" [ 655.708168] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.718759] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239636, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.768509] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239634, 'name': Rename_Task, 'duration_secs': 0.155263} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.768845] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.769109] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d1d93ea-e687-4f78-af6e-550c552a6a27 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.776584] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 655.776584] env[68674]: value = "task-3239637" [ 655.776584] env[68674]: _type = "Task" [ 655.776584] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.045188] env[68674]: DEBUG oslo_vmware.api [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239635, 'name': PowerOffVM_Task, 'duration_secs': 0.244125} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.045507] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 656.045734] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 656.046067] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-956ed7ef-9ab0-47a1-83ee-5d9b6f21e5b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.098585] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.099213] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 656.103651] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.904s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.107551] env[68674]: INFO nova.compute.claims [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.113190] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 656.113423] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 656.113605] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Deleting the datastore file [datastore1] ae945f3f-fde8-4b25-a5bd-81014fc99690 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 656.115163] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-994c8b40-be8d-4d45-9752-10ac549faaf0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.123866] env[68674]: DEBUG oslo_vmware.api [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for the task: (returnval){ [ 656.123866] env[68674]: value = "task-3239639" [ 656.123866] env[68674]: _type = "Task" [ 656.123866] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.136485] env[68674]: DEBUG oslo_vmware.api [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239639, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.184153] env[68674]: DEBUG nova.compute.manager [req-cb0b386a-2434-41c1-b7fe-bbfa96c44b4c req-7b7d78aa-ea53-46cc-9dbb-690da8f1b26b service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-vif-plugged-55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 656.184153] env[68674]: DEBUG oslo_concurrency.lockutils [req-cb0b386a-2434-41c1-b7fe-bbfa96c44b4c req-7b7d78aa-ea53-46cc-9dbb-690da8f1b26b service nova] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.184568] env[68674]: DEBUG oslo_concurrency.lockutils [req-cb0b386a-2434-41c1-b7fe-bbfa96c44b4c req-7b7d78aa-ea53-46cc-9dbb-690da8f1b26b service nova] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.184985] env[68674]: DEBUG oslo_concurrency.lockutils [req-cb0b386a-2434-41c1-b7fe-bbfa96c44b4c req-7b7d78aa-ea53-46cc-9dbb-690da8f1b26b service nova] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.185301] env[68674]: DEBUG nova.compute.manager [req-cb0b386a-2434-41c1-b7fe-bbfa96c44b4c req-7b7d78aa-ea53-46cc-9dbb-690da8f1b26b service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] No waiting events found dispatching network-vif-plugged-55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.185573] env[68674]: WARNING nova.compute.manager [req-cb0b386a-2434-41c1-b7fe-bbfa96c44b4c req-7b7d78aa-ea53-46cc-9dbb-690da8f1b26b service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received unexpected event network-vif-plugged-55160236-eb1d-47d3-bca8-d3b46267c37f for instance with vm_state building and task_state spawning. [ 656.201221] env[68674]: DEBUG nova.network.neutron [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Successfully updated port: 55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 656.220439] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239636, 'name': CreateVM_Task, 'duration_secs': 0.320097} completed successfully. 
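The external_instance_event / pop_instance_event entries show how the compute manager matches Neutron notifications against per-instance waiters: network-vif-plugged-55160236-... arrives before anything has registered for it, so it is dispatched with "No waiting events found" and logged as an unexpected event while the instance is still building. A minimal sketch of that waiter pattern, as a hypothetical structure far simpler than nova.compute.manager.InstanceEvents:

```python
# Minimal waiter/dispatcher sketch for the external-event pattern in the log:
# spawn code registers interest in an event name; the Neutron-triggered handler
# either wakes the waiter or reports the event as unexpected. Hypothetical code.
import threading
from collections import defaultdict

_waiters: dict[str, dict[str, threading.Event]] = defaultdict(dict)
_guard = threading.Lock()


def prepare_for_event(instance_uuid: str, event_name: str) -> threading.Event:
    ev = threading.Event()
    with _guard:
        _waiters[instance_uuid][event_name] = ev
    return ev


def dispatch_event(instance_uuid: str, event_name: str) -> None:
    with _guard:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        print(f"WARNING: received unexpected event {event_name} "
              f"for instance {instance_uuid}")
    else:
        ev.set()


# Usage: the spawn path would call prepare_for_event(...) before plugging the
# VIF and then wait() on the returned Event; the external-event handler calls
# dispatch_event(...) when Neutron reports network-vif-plugged.
```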
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.220613] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.220961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.221149] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.221477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.221767] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf79abe9-e11b-4631-b25d-a75d4aa292f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.228384] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 656.228384] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52befacc-a04f-fd88-9c18-0768237437c1" [ 656.228384] env[68674]: _type = "Task" [ 656.228384] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.238801] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52befacc-a04f-fd88-9c18-0768237437c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.288163] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239637, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.391503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "714142ec-89ad-44ab-8543-11493172a50b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.391503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "714142ec-89ad-44ab-8543-11493172a50b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.613259] env[68674]: DEBUG nova.compute.utils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 656.615625] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 656.615859] env[68674]: DEBUG nova.network.neutron [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 656.636559] env[68674]: DEBUG oslo_vmware.api [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Task: {'id': task-3239639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230384} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.638502] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 656.638706] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 656.638882] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 656.639061] env[68674]: INFO nova.compute.manager [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Took 1.13 seconds to destroy the instance on the hypervisor. [ 656.639300] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.639484] env[68674]: DEBUG nova.compute.manager [-] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 656.639576] env[68674]: DEBUG nova.network.neutron [-] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 656.703927] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.704112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.704267] env[68674]: DEBUG nova.network.neutron [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.715219] env[68674]: DEBUG nova.policy [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a872f1b7c99f48bd821993386afdf84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22d2d9d6bfcd4f219b02b1356b36123e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 656.738645] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52befacc-a04f-fd88-9c18-0768237437c1, 'name': SearchDatastore_Task, 'duration_secs': 0.016591} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.738817] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.739060] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.739289] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.739434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.739608] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.739866] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1efb3f0-3a8b-4d27-bef5-2314b22d3b0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.754139] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.754282] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.754995] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae03403-7ba4-4528-a3a0-3747f7ee5274 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.760087] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 656.760087] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528103d0-e2ae-348d-2a76-8d162bead60f" [ 656.760087] env[68674]: _type = "Task" [ 656.760087] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.767323] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528103d0-e2ae-348d-2a76-8d162bead60f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.784701] env[68674]: DEBUG oslo_vmware.api [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239637, 'name': PowerOnVM_Task, 'duration_secs': 0.534827} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.785323] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.785323] env[68674]: INFO nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Took 7.97 seconds to spawn the instance on the hypervisor. [ 656.785465] env[68674]: DEBUG nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.791844] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b0c7f9-65c8-4323-a938-84a6daf2bb13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.116472] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 657.253821] env[68674]: DEBUG nova.network.neutron [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.274488] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528103d0-e2ae-348d-2a76-8d162bead60f, 'name': SearchDatastore_Task, 'duration_secs': 0.037234} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.275563] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfe80c05-bebb-43bf-9b7b-8742c5b3a6f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.291263] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 657.291263] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5259ce29-6298-208a-4a34-825a4a7bd703" [ 657.291263] env[68674]: _type = "Task" [ 657.291263] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.314409] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5259ce29-6298-208a-4a34-825a4a7bd703, 'name': SearchDatastore_Task, 'duration_secs': 0.010189} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.314985] env[68674]: INFO nova.compute.manager [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Took 45.14 seconds to build instance. 
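[editor's note] The "Waiting for the task ... / progress is N% / ... completed successfully" triples that recur above (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...) all come from the same polling loop in oslo_vmware/api.py, per the wait_for_task/_poll_task file:line references printed with each entry. The sketch below is a minimal, illustrative rendering of that pattern only; the get_task_info callable and its fields are assumptions standing in for the vCenter property reads the real oslo.vmware code performs, not its actual API.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info: assumed callable returning an object with
        # .state ('running' | 'success' | 'error'), .progress and .error,
        # standing in for the PropertyCollector read oslo.vmware performs.
        while True:
            info = get_task_info()
            if info.state == 'running':
                # Corresponds to the "... progress is N%." DEBUG lines above.
                print("Task progress is %s%%." % (info.progress or 0))
                time.sleep(poll_interval)
            elif info.state == 'success':
                # Corresponds to "... completed successfully."
                return info
            else:
                raise RuntimeError("Task failed: %s" % info.error)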
[ 657.316101] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.316490] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.317013] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c3cb44a-2abd-47c1-9247-0833c6088382 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.325718] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 657.325718] env[68674]: value = "task-3239640" [ 657.325718] env[68674]: _type = "Task" [ 657.325718] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.341986] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239640, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.493860] env[68674]: DEBUG nova.network.neutron [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.600317] env[68674]: DEBUG nova.network.neutron [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Successfully created port: b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.809094] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333f1917-db7d-4868-90cc-244899abe683 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.817776] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccf5e16-166a-475c-9d0e-728bfb92c6b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.821972] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a75a6700-73f0-42a0-a157-7f2e64b5b5b2 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.883s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.860305] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df9c736-4549-4474-9f47-0ef333b9149c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.866528] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 
tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239640, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479917} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.866908] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 657.867160] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 657.867435] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61c6dce8-2cc9-48ee-96ab-eb061fb4f3f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.872934] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd29df79-6470-4e39-9fd9-9cf4a76ba17c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.880053] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 657.880053] env[68674]: value = "task-3239641" [ 657.880053] env[68674]: _type = "Task" [ 657.880053] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.892568] env[68674]: DEBUG nova.compute.provider_tree [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.902222] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239641, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.997011] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.997366] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Instance network_info: |[{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 657.997794] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:26:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4fe416-47a6-4542-b59d-8c71ab4d6503', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55160236-eb1d-47d3-bca8-d3b46267c37f', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.005704] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Creating folder: Project (21163cbc3a5a4dc3abc832c4560c33e2). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.005784] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca60c43b-cdd8-4b94-976f-437c242f3ab0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.019015] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Created folder: Project (21163cbc3a5a4dc3abc832c4560c33e2) in parent group-v647377. [ 658.019212] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Creating folder: Instances. Parent ref: group-v647470. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.019480] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9c9572e-10ca-4e35-a856-c070aaccec29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.027898] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Created folder: Instances in parent group-v647470. [ 658.028134] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 658.028327] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 658.028533] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8ef5e35-519d-4979-a97a-40329e87c0c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.048385] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.048385] env[68674]: value = "task-3239644" [ 658.048385] env[68674]: _type = "Task" [ 658.048385] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.055779] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239644, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.075260] env[68674]: DEBUG nova.network.neutron [-] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.130607] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 658.157086] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 658.157359] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.157581] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 658.157712] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.157861] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 658.158016] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 658.158423] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 658.158633] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 658.159318] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 
tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 658.159318] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 658.159318] env[68674]: DEBUG nova.virt.hardware [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 658.160653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5ada8b-71b1-4702-a0cf-627886012471 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.169361] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a80769-c2c0-4e65-b457-a9f9ddac5d04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.255799] env[68674]: DEBUG nova.compute.manager [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-changed-55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 658.256231] env[68674]: DEBUG nova.compute.manager [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Refreshing instance network info cache due to event network-changed-55160236-eb1d-47d3-bca8-d3b46267c37f. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 658.256496] env[68674]: DEBUG oslo_concurrency.lockutils [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] Acquiring lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.256687] env[68674]: DEBUG oslo_concurrency.lockutils [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] Acquired lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.256811] env[68674]: DEBUG nova.network.neutron [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Refreshing network info cache for port 55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.328167] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.389611] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239641, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069391} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.389916] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.390728] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ed12ad-e618-423d-a71f-e598d5988c72 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.405969] env[68674]: DEBUG nova.scheduler.client.report [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 658.419152] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.419547] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79a77d3d-f130-43dc-bbfe-e029d7ec1750 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.440862] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 658.440862] env[68674]: value = "task-3239645" [ 658.440862] env[68674]: _type = "Task" [ 658.440862] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.451157] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239645, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.558014] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239644, 'name': CreateVM_Task, 'duration_secs': 0.404858} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.558216] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.558851] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.559020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.559340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 658.559581] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d75cbc3a-ad7b-43c2-9c20-f6169e485d5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.563874] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 658.563874] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526cd1a5-2248-5efa-8e48-4c88aeac2109" [ 658.563874] env[68674]: _type = "Task" [ 658.563874] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.571057] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526cd1a5-2248-5efa-8e48-4c88aeac2109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.577540] env[68674]: INFO nova.compute.manager [-] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Took 1.94 seconds to deallocate network for instance. 
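[editor's note] The Acquiring/Acquired/Releasing lock lines around "[datastore2] devstack-image-cache_base/<image id>" show how concurrent builds sharing the same Glance image are serialized while the cached template VMDK is located and, if missing, fetched. A rough sketch of that serialization follows, assuming only oslo.concurrency's lockutils.lock context manager (which is real); the helper name and body are hypothetical, and Nova's actual call also involves the external semaphore seen in the log.

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, datastore='datastore2'):
        cache_path = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        # One worker at a time per cached image; the others block here,
        # which is what the "Acquiring lock ... / Acquired lock ..." pairs
        # above record.
        with lockutils.lock(cache_path):
            # In the real driver this is roughly where SearchDatastore_Task
            # checks for the cached VMDK and CopyVirtualDisk_Task copies it
            # into the instance folder.
            pass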
[ 658.854265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.859276] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.859499] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.859694] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.859873] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.860072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.862626] env[68674]: INFO nova.compute.manager [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Terminating instance [ 658.912710] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.913225] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.915775] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.999s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.915989] env[68674]: DEBUG nova.objects.instance [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 658.951969] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239645, 'name': ReconfigVM_Task, 'duration_secs': 0.292213} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.952305] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Reconfigured VM instance instance-0000001c to attach disk [datastore2] f45200cd-6cb0-498a-8858-1e70177031d9/f45200cd-6cb0-498a-8858-1e70177031d9.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 658.953182] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd7755db-3bd6-48cf-854e-c80d0da02693 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.959886] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 658.959886] env[68674]: value = "task-3239646" [ 658.959886] env[68674]: _type = "Task" [ 658.959886] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.973030] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239646, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.049510] env[68674]: DEBUG nova.network.neutron [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updated VIF entry in instance network info cache for port 55160236-eb1d-47d3-bca8-d3b46267c37f. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 659.049863] env[68674]: DEBUG nova.network.neutron [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.081661] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526cd1a5-2248-5efa-8e48-4c88aeac2109, 'name': SearchDatastore_Task, 'duration_secs': 0.014628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.082045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.082337] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.082639] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.082806] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.082993] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.083897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.088200] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fc05374-6229-47da-ac5a-761d7391b0cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.104845] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.105067] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 659.105831] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5398256d-6355-48cc-8ada-a3d5099e0178 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.111640] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 659.111640] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274873c-0052-83a2-9028-27c7d424c20c" [ 659.111640] env[68674]: _type = "Task" [ 659.111640] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.119052] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274873c-0052-83a2-9028-27c7d424c20c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.265341] env[68674]: DEBUG nova.network.neutron [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Successfully updated port: b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 659.367200] env[68674]: DEBUG nova.compute.manager [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 659.367446] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 659.368389] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4466329-e977-43f5-8cac-8625a23afc02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.375838] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 659.376085] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73cf8dc4-24e1-44b4-a219-10c287af5a1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.381729] env[68674]: DEBUG oslo_vmware.api [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 659.381729] env[68674]: value = "task-3239647" [ 659.381729] env[68674]: _type = "Task" [ 659.381729] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.389144] env[68674]: DEBUG oslo_vmware.api [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.423706] env[68674]: DEBUG nova.compute.utils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 659.428020] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 659.428020] env[68674]: DEBUG nova.network.neutron [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 659.472240] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239646, 'name': Rename_Task, 'duration_secs': 0.13153} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.472611] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 659.472868] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d095145e-fc7b-40fa-828a-662be185e4ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.479406] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 659.479406] env[68674]: value = "task-3239648" [ 659.479406] env[68674]: _type = "Task" [ 659.479406] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.487282] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239648, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.496877] env[68674]: DEBUG nova.policy [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5080a4f68ef1482caaee5aa26614e6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c958fcb56a934ef7919b76aa2a193429', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.552493] env[68674]: DEBUG oslo_concurrency.lockutils [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] Releasing lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.552784] env[68674]: DEBUG nova.compute.manager [req-4d5f7e19-5b2b-4454-b8bb-0bbad1cd8ea9 req-cbc06d32-8c3d-42e4-8ca8-2a76f5a252c0 service nova] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Received event network-vif-deleted-28a81236-a1d4-4c87-86fb-5ad97008d9bb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.622487] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274873c-0052-83a2-9028-27c7d424c20c, 'name': SearchDatastore_Task, 'duration_secs': 0.008169} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.623272] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80d0b271-779e-4a00-9163-b88febf7993c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.628318] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 659.628318] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1577f-6dc6-3bc7-a587-f33a3a0d291b" [ 659.628318] env[68674]: _type = "Task" [ 659.628318] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.635878] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1577f-6dc6-3bc7-a587-f33a3a0d291b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.771683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.771862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.772030] env[68674]: DEBUG nova.network.neutron [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.879603] env[68674]: DEBUG nova.network.neutron [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Successfully created port: 3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.892113] env[68674]: DEBUG oslo_vmware.api [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239647, 'name': PowerOffVM_Task, 'duration_secs': 0.206726} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.892526] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 659.892828] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 659.893207] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fd28a30-4adb-487b-99f1-1ace1c988ed5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.928035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ce8875-c20b-493b-96d5-aea97c24ad75 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.928282] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.390s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.929882] env[68674]: INFO nova.compute.claims [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.933705] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.968306] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 659.972018] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 659.972018] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Deleting the datastore file [datastore2] 0097c367-bb3a-4b7b-9fcc-b3e3482689e2 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 659.972018] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5026f7a7-8948-43bb-8c70-2005acd50578 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.977275] env[68674]: DEBUG oslo_vmware.api [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for the task: (returnval){ [ 659.977275] env[68674]: value = "task-3239650" [ 659.977275] env[68674]: _type = "Task" [ 659.977275] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.992276] env[68674]: DEBUG oslo_vmware.api [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.995483] env[68674]: DEBUG oslo_vmware.api [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239648, 'name': PowerOnVM_Task, 'duration_secs': 0.441707} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.996668] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.996879] env[68674]: DEBUG nova.compute.manager [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.997685] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5267856-e092-4460-b698-54504f70e149 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.138658] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1577f-6dc6-3bc7-a587-f33a3a0d291b, 'name': SearchDatastore_Task, 'duration_secs': 0.011197} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.138936] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.139223] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 045e54ff-9e2c-4b04-afac-34cb6580cb2c/045e54ff-9e2c-4b04-afac-34cb6580cb2c.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.139480] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0bac65f-3c65-4bb8-918a-508ea8f231a6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.146445] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 660.146445] env[68674]: value = "task-3239651" [ 660.146445] env[68674]: _type = "Task" [ 660.146445] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.154105] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239651, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.281984] env[68674]: DEBUG nova.compute.manager [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Received event network-vif-plugged-b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 660.282313] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] Acquiring lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.282606] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.282851] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.283112] env[68674]: DEBUG nova.compute.manager [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] No waiting events found dispatching network-vif-plugged-b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 660.283366] env[68674]: WARNING nova.compute.manager [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Received unexpected event network-vif-plugged-b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 for instance with vm_state building and task_state spawning. [ 660.283609] env[68674]: DEBUG nova.compute.manager [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Received event network-changed-b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 660.283843] env[68674]: DEBUG nova.compute.manager [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Refreshing instance network info cache due to event network-changed-b1a3099e-550f-4bc4-a4b5-1fe1e04ea342. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 660.284104] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] Acquiring lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.308281] env[68674]: DEBUG nova.network.neutron [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.485250] env[68674]: DEBUG nova.network.neutron [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance_info_cache with network_info: [{"id": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "address": "fa:16:3e:dc:4d:99", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a3099e-55", "ovs_interfaceid": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.492902] env[68674]: DEBUG oslo_vmware.api [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Task: {'id': task-3239650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327601} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.493129] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.493427] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 660.493512] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.493683] env[68674]: INFO nova.compute.manager [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 660.493929] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.494370] env[68674]: DEBUG nova.compute.manager [-] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 660.494468] env[68674]: DEBUG nova.network.neutron [-] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.515303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.659971] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239651, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.959767] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.986368] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.986620] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.986781] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.986951] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.987113] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.987261] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.987557] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 660.987736] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 660.987910] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.988096] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.988276] env[68674]: DEBUG nova.virt.hardware [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.989545] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.989829] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Instance network_info: |[{"id": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "address": "fa:16:3e:dc:4d:99", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a3099e-55", "ovs_interfaceid": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 660.990654] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f368ff3b-c1c3-4c6d-a3b4-92f5356f32d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.995336] env[68674]: DEBUG 
oslo_concurrency.lockutils [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] Acquired lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.995523] env[68674]: DEBUG nova.network.neutron [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Refreshing network info cache for port b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.996660] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:4d:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1a3099e-550f-4bc4-a4b5-1fe1e04ea342', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 661.004146] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Creating folder: Project (22d2d9d6bfcd4f219b02b1356b36123e). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.004838] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa3594a5-570a-4101-b82a-b27265bc318d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.013187] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7219ffd0-b827-40bc-b9f8-3efd1e0f4498 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.024426] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Created folder: Project (22d2d9d6bfcd4f219b02b1356b36123e) in parent group-v647377. [ 661.024821] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Creating folder: Instances. Parent ref: group-v647473. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.025896] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5c18d58-9c1b-4785-aff1-4390da2206fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.036133] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Created folder: Instances in parent group-v647473. 
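[annotation] The records around this point show the driver's usual invoke-then-poll pattern: a vCenter *_Task method is started through the oslo.vmware session (Folder.CreateFolder and CreateVM_Task here, CopyVirtualDisk_Task and ExtendVirtualDisk_Task nearby), the API layer logs "Waiting for the task", and the poller reports progress until the task reaches a terminal state. The following is a minimal, self-contained Python sketch of that polling loop, for illustration only; it is not the oslo_vmware implementation, and TaskInfo and poll_task_state are invented stand-ins for the vCenter TaskInfo object and the property read the real session performs.

    # Illustrative sketch of the "Waiting for the task ... progress is N% ...
    # completed successfully" pattern seen in these records. NOT oslo_vmware code:
    # TaskInfo and poll_task_state() are hypothetical stand-ins.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        task_id: str        # e.g. "task-3239654"
        name: str           # e.g. "CreateVM_Task"
        state: str          # "running" | "success" | "error"
        progress: int       # 0-100
        error: str | None = None

    def wait_for_task(poll_task_state, task_id, interval=0.5):
        """Poll a vCenter-style task until it succeeds or fails.

        poll_task_state is assumed to return a fresh TaskInfo for task_id on
        each call; in the real session this happens inside a looping call that
        reads the task's info property from the server.
        """
        print(f"Waiting for the task: {task_id} to complete.")
        while True:
            info = poll_task_state(task_id)
            if info.state == "success":
                print(f"Task: {info.task_id} ({info.name}) completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
            print(f"Task: {info.task_id} ({info.name}) progress is {info.progress}%.")
            time.sleep(interval)

In the real driver the waiting, the periodic "progress is N%" records, and the final "completed successfully" record are all emitted by oslo_vmware.api (wait_for_task and _poll_task), as the api.py:397/434/444 source references in the surrounding records indicate.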
[ 661.036133] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 661.038296] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 661.038650] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c8f9e4e-d2ff-44ce-a11a-d88b2291eaf1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.059161] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 661.059161] env[68674]: value = "task-3239654" [ 661.059161] env[68674]: _type = "Task" [ 661.059161] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.067803] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239654, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.160220] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.606883} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.160478] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 045e54ff-9e2c-4b04-afac-34cb6580cb2c/045e54ff-9e2c-4b04-afac-34cb6580cb2c.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.161318] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.161318] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02b7acc1-d1d0-4b02-8cf9-376ef81c0717 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.170902] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 661.170902] env[68674]: value = "task-3239655" [ 661.170902] env[68674]: _type = "Task" [ 661.170902] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.179697] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.319090] env[68674]: DEBUG nova.network.neutron [-] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.546019] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9309f62b-8017-4de2-b767-08c75c78d507 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.554387] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e0ea68-83a4-4a43-8940-a431bcf7b416 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.589372] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f514a328-13e3-4630-be1c-5d885a4f46d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.595512] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239654, 'name': CreateVM_Task, 'duration_secs': 0.354933} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.596426] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.597439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.597766] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.598204] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.600376] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-009537dc-f6eb-4139-9f58-ee1fd57d08b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.607020] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04108b6-89e7-42db-868b-94ef58270f9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.611194] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 661.611194] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae6077-ca3c-fdee-9f7c-cc1bac535407" [ 661.611194] env[68674]: _type = "Task" [ 661.611194] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.620269] env[68674]: DEBUG nova.compute.provider_tree [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.630579] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae6077-ca3c-fdee-9f7c-cc1bac535407, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.682157] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069324} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.682407] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.683179] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e3c514-8751-4bc4-8b9a-c30c8287774e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.697167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "f45200cd-6cb0-498a-8858-1e70177031d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.697388] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "f45200cd-6cb0-498a-8858-1e70177031d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.697590] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "f45200cd-6cb0-498a-8858-1e70177031d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.697773] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "f45200cd-6cb0-498a-8858-1e70177031d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.697939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "f45200cd-6cb0-498a-8858-1e70177031d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.707964] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 045e54ff-9e2c-4b04-afac-34cb6580cb2c/045e54ff-9e2c-4b04-afac-34cb6580cb2c.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.708446] 
env[68674]: INFO nova.compute.manager [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Terminating instance [ 661.709958] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a90ab49d-2596-43ad-9140-e7c1e532145a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.734567] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 661.734567] env[68674]: value = "task-3239656" [ 661.734567] env[68674]: _type = "Task" [ 661.734567] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.743817] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239656, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.794913] env[68674]: DEBUG nova.network.neutron [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Successfully updated port: 3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.822497] env[68674]: INFO nova.compute.manager [-] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Took 1.33 seconds to deallocate network for instance. [ 661.990197] env[68674]: DEBUG nova.network.neutron [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updated VIF entry in instance network info cache for port b1a3099e-550f-4bc4-a4b5-1fe1e04ea342. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 661.990704] env[68674]: DEBUG nova.network.neutron [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance_info_cache with network_info: [{"id": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "address": "fa:16:3e:dc:4d:99", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a3099e-55", "ovs_interfaceid": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.130595] env[68674]: DEBUG nova.scheduler.client.report [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.134008] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae6077-ca3c-fdee-9f7c-cc1bac535407, 'name': SearchDatastore_Task, 'duration_secs': 0.069058} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.134614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.135247] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.135247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.135247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.135953] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.135953] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de69cc5b-66aa-41e3-992d-1543832144e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.147914] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.147914] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 662.148610] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6011a28a-c210-4409-bc75-66181cf7d087 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.154441] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 662.154441] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a8be49-e889-c837-0956-48262f212e44" [ 662.154441] env[68674]: _type = "Task" [ 662.154441] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.161982] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a8be49-e889-c837-0956-48262f212e44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.229916] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "refresh_cache-f45200cd-6cb0-498a-8858-1e70177031d9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.230130] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquired lock "refresh_cache-f45200cd-6cb0-498a-8858-1e70177031d9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.230315] env[68674]: DEBUG nova.network.neutron [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.244948] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239656, 'name': ReconfigVM_Task, 'duration_secs': 0.278297} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.245882] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 045e54ff-9e2c-4b04-afac-34cb6580cb2c/045e54ff-9e2c-4b04-afac-34cb6580cb2c.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.246016] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4219a9c-32b4-491b-8377-5865929d72e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.252159] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 662.252159] env[68674]: value = "task-3239657" [ 662.252159] env[68674]: _type = "Task" [ 662.252159] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.259831] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239657, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.297436] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.297684] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.297884] env[68674]: DEBUG nova.network.neutron [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.329252] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.342651] env[68674]: DEBUG nova.compute.manager [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Received event network-vif-deleted-c90fb527-622f-4719-996e-476716d097db 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.342865] env[68674]: DEBUG nova.compute.manager [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Received event network-vif-plugged-3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.343118] env[68674]: DEBUG oslo_concurrency.lockutils [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] Acquiring lock "23891bad-1b63-4237-9243-78954cf67d52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.343368] env[68674]: DEBUG oslo_concurrency.lockutils [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] Lock "23891bad-1b63-4237-9243-78954cf67d52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.343521] env[68674]: DEBUG oslo_concurrency.lockutils [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] Lock "23891bad-1b63-4237-9243-78954cf67d52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.343735] env[68674]: DEBUG nova.compute.manager [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] No waiting events found dispatching network-vif-plugged-3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 662.343913] env[68674]: WARNING nova.compute.manager [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Received unexpected event network-vif-plugged-3660c8d4-d8be-4132-b92b-f96aa37b627b for instance with vm_state building and task_state spawning. [ 662.344092] env[68674]: DEBUG nova.compute.manager [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Received event network-changed-3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.344249] env[68674]: DEBUG nova.compute.manager [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Refreshing instance network info cache due to event network-changed-3660c8d4-d8be-4132-b92b-f96aa37b627b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 662.344416] env[68674]: DEBUG oslo_concurrency.lockutils [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] Acquiring lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.493477] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9e1d52c-67a9-4fd6-83df-f82f8f72c46a req-417e09f1-5e87-4517-9dda-7eeac4a37e08 service nova] Releasing lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.637020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.637485] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 662.640871] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.131s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.642246] env[68674]: INFO nova.compute.claims [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.664164] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a8be49-e889-c837-0956-48262f212e44, 'name': SearchDatastore_Task, 'duration_secs': 0.032756} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.665016] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93535d76-9162-4dff-a973-b92c606e4bf6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.670078] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 662.670078] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522e94e2-5d17-e07f-6db6-7daa22f52f77" [ 662.670078] env[68674]: _type = "Task" [ 662.670078] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.678660] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522e94e2-5d17-e07f-6db6-7daa22f52f77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.748745] env[68674]: DEBUG nova.network.neutron [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.763240] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239657, 'name': Rename_Task, 'duration_secs': 0.162471} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.765280] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.765532] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1b00702-eee5-4e26-85d0-a035dd971eb2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.772530] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 662.772530] env[68674]: value = "task-3239658" [ 662.772530] env[68674]: _type = "Task" [ 662.772530] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.780417] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.813633] env[68674]: DEBUG nova.network.neutron [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.837747] env[68674]: DEBUG nova.network.neutron [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.034192] env[68674]: DEBUG nova.network.neutron [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updating instance_info_cache with network_info: [{"id": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "address": "fa:16:3e:60:3b:77", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3660c8d4-d8", "ovs_interfaceid": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.143275] env[68674]: DEBUG nova.compute.utils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.144733] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.144935] env[68674]: DEBUG nova.network.neutron [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.182478] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522e94e2-5d17-e07f-6db6-7daa22f52f77, 'name': SearchDatastore_Task, 'duration_secs': 0.016197} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.182761] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.183035] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e/77fa5a89-961b-4c84-a75e-a5be0253677e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.183287] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd7355ab-f108-4057-8fd7-63969f30c1fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.190149] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 663.190149] env[68674]: value = "task-3239659" [ 663.190149] env[68674]: _type = "Task" [ 663.190149] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.196394] env[68674]: DEBUG nova.policy [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5be2334af3643edb51d6d21a6c6d308', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e935d3cc470460cb5fa899412f4b0d1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 663.202473] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.284280] env[68674]: DEBUG oslo_vmware.api [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239658, 'name': PowerOnVM_Task, 'duration_secs': 0.480501} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.284603] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.284834] env[68674]: INFO nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Took 7.92 seconds to spawn the instance on the hypervisor. [ 663.285019] env[68674]: DEBUG nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.285788] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c9ea42-3619-427a-93b1-d132c30007d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.316235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Releasing lock "refresh_cache-f45200cd-6cb0-498a-8858-1e70177031d9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.316707] env[68674]: DEBUG nova.compute.manager [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 663.316938] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 663.317916] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d910b86b-fad4-4517-ac4c-91b8e00c16b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.325938] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 663.327567] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba9551e2-9dac-42a1-baa1-b1ba437bcf8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.336373] env[68674]: DEBUG oslo_vmware.api [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 663.336373] env[68674]: value = "task-3239660" [ 663.336373] env[68674]: _type = "Task" [ 663.336373] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.347623] env[68674]: DEBUG oslo_vmware.api [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239660, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.539723] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.540120] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Instance network_info: |[{"id": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "address": "fa:16:3e:60:3b:77", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3660c8d4-d8", "ovs_interfaceid": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 663.540450] env[68674]: DEBUG oslo_concurrency.lockutils [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] Acquired lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.540664] env[68674]: DEBUG nova.network.neutron [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Refreshing network info cache for port 3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.542975] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:3b:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3660c8d4-d8be-4132-b92b-f96aa37b627b', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.554490] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 
tempest-ServerActionsTestOtherA-986359527-project-member] Creating folder: Project (c958fcb56a934ef7919b76aa2a193429). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.557311] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0d407c4-548d-4531-8685-891ee1034416 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.570375] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created folder: Project (c958fcb56a934ef7919b76aa2a193429) in parent group-v647377. [ 663.570648] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating folder: Instances. Parent ref: group-v647476. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.570919] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24e95d43-7ffe-4a49-82cd-0351cbbf3c68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.580757] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created folder: Instances in parent group-v647476. [ 663.581091] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.581304] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.581882] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8dbd0ec-0b2a-46d4-ac29-bfae54280dd5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.601842] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.601842] env[68674]: value = "task-3239663" [ 663.601842] env[68674]: _type = "Task" [ 663.601842] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.610963] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.618490] env[68674]: DEBUG nova.network.neutron [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Successfully created port: ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.648540] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 663.706983] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239659, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.812435] env[68674]: INFO nova.compute.manager [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Took 47.52 seconds to build instance. [ 663.846075] env[68674]: DEBUG oslo_vmware.api [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239660, 'name': PowerOffVM_Task, 'duration_secs': 0.215271} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.846373] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 663.846593] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 663.846815] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b10120a-eb09-4df2-a914-b684a8797eb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.873843] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 663.874085] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 663.874274] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Deleting the datastore file [datastore2] f45200cd-6cb0-498a-8858-1e70177031d9 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 663.874529] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbeb82ca-be92-44f5-bb79-d897bf3d1d98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.882961] env[68674]: DEBUG oslo_vmware.api [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for the task: (returnval){ [ 663.882961] env[68674]: value = "task-3239665" [ 663.882961] env[68674]: _type = "Task" [ 663.882961] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.892821] env[68674]: DEBUG oslo_vmware.api [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239665, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.111437] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239663, 'name': CreateVM_Task, 'duration_secs': 0.365173} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.113833] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 664.114842] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.115094] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.115331] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 664.115767] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3f098b-b13f-4b9c-acb1-da3b7c2fd8de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.122259] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 664.122259] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522348ca-b746-9cc6-c861-095e91aada45" [ 664.122259] env[68674]: _type = "Task" [ 664.122259] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.133490] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522348ca-b746-9cc6-c861-095e91aada45, 'name': SearchDatastore_Task, 'duration_secs': 0.009103} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.133750] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.133991] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.134438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.134438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.134563] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.134788] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03fefcc4-0c1e-405e-899f-3cfe4d6cd520 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.143443] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.143639] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.144374] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8553c994-8b0d-4891-a431-1cbb4390e219 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.156546] env[68674]: INFO nova.virt.block_device [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Booting with volume 6312e471-ebb6-489c-a453-cfed8d42b5ac at /dev/sda [ 664.160776] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 664.160776] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52894c9d-95a1-3ffa-a8f7-bd3788fe82c3" [ 664.160776] env[68674]: _type = "Task" [ 664.160776] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.179487] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52894c9d-95a1-3ffa-a8f7-bd3788fe82c3, 'name': SearchDatastore_Task, 'duration_secs': 0.008766} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.182392] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea2856b1-e84c-43e8-8250-55febf1e24e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.187907] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 664.187907] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c9711-903b-1797-4be7-c3a27e9c1197" [ 664.187907] env[68674]: _type = "Task" [ 664.187907] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.196751] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c9711-903b-1797-4be7-c3a27e9c1197, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.203975] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563031} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.203975] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e/77fa5a89-961b-4c84-a75e-a5be0253677e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.204154] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.204366] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e616b5d-3b76-47ec-b29f-db1bb03b91a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.211838] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 664.211838] env[68674]: value = "task-3239666" [ 664.211838] env[68674]: _type = "Task" [ 664.211838] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.221444] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239666, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.221706] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45219956-4388-419c-a56a-d416a13b5882 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.240917] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4389087e-6796-4d1f-b839-eeac0b95d875 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.264270] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a92d5f4-7d0d-4f6d-9186-bfda1fd43da5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.282951] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6e93f83-1bdb-486e-a484-2fa1d069f6e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.287454] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea72ec2-59e1-48ae-807e-5770662cc632 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.294482] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f694ea25-56fe-47ab-835f-34814345cb1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.331398] env[68674]: DEBUG oslo_concurrency.lockutils [None req-24741f6f-f4c1-4948-9cfc-0a013286a0c8 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.159s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.332775] env[68674]: DEBUG nova.network.neutron [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updated VIF entry in instance network info cache for port 3660c8d4-d8be-4132-b92b-f96aa37b627b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.333231] env[68674]: DEBUG nova.network.neutron [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updating instance_info_cache with network_info: [{"id": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "address": "fa:16:3e:60:3b:77", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3660c8d4-d8", "ovs_interfaceid": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.338032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3f73da-fbe9-4ede-a564-c379429a8783 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.344839] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699788ed-1843-4c88-9bad-dc0185000fde {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.365215] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389e4faa-023c-4996-86df-e95061e81a7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.375805] env[68674]: DEBUG nova.compute.provider_tree [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.380893] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af7a62e-0a99-41bb-ab68-69b572a8abee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.392058] env[68674]: DEBUG oslo_vmware.api [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Task: {'id': task-3239665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092257} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.392326] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 664.392582] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 664.392792] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 664.392993] env[68674]: INFO nova.compute.manager [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Took 1.08 seconds to destroy the instance on the hypervisor. [ 664.393300] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.393581] env[68674]: DEBUG nova.compute.manager [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 664.393698] env[68674]: DEBUG nova.network.neutron [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 664.401215] env[68674]: DEBUG nova.virt.block_device [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updating existing volume attachment record: c57d2634-e154-45a2-9463-e37929a6d2a6 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 664.417252] env[68674]: DEBUG nova.network.neutron [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.697872] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c9711-903b-1797-4be7-c3a27e9c1197, 'name': SearchDatastore_Task, 'duration_secs': 0.009415} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.698146] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.698407] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 23891bad-1b63-4237-9243-78954cf67d52/23891bad-1b63-4237-9243-78954cf67d52.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.698657] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b205fe2-766f-4703-be5c-27d7be4f9091 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.704663] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 664.704663] env[68674]: value = "task-3239667" [ 664.704663] env[68674]: _type = "Task" [ 664.704663] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.712266] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.719184] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.191464} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.719510] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.723036] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd848ed-8f75-4f96-ba69-65a07cd43c0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.742981] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e/77fa5a89-961b-4c84-a75e-a5be0253677e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.743289] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5946f377-40c0-428b-8891-092723a4dd96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.763711] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 664.763711] env[68674]: value = "task-3239668" [ 664.763711] env[68674]: _type = "Task" [ 664.763711] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.774319] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239668, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.841089] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.844085] env[68674]: DEBUG oslo_concurrency.lockutils [req-ab10f2c4-b497-4aea-a74b-a6c13ace704d req-d2b35810-8a4c-41e4-9483-30e69f491f8f service nova] Releasing lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.879264] env[68674]: DEBUG nova.scheduler.client.report [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.920180] env[68674]: DEBUG nova.network.neutron [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.947671] env[68674]: DEBUG nova.compute.manager [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-changed-55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.947911] env[68674]: DEBUG nova.compute.manager [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Refreshing instance network info cache due to event network-changed-55160236-eb1d-47d3-bca8-d3b46267c37f. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 664.948320] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] Acquiring lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.948738] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] Acquired lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.948738] env[68674]: DEBUG nova.network.neutron [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Refreshing network info cache for port 55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.114351] env[68674]: DEBUG nova.compute.manager [req-f04ffbf9-bc57-49df-ba8b-80061f0465a1 req-9a9d144d-972f-4dcd-add0-984e1344d56a service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Received event network-vif-plugged-ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.114636] env[68674]: DEBUG oslo_concurrency.lockutils [req-f04ffbf9-bc57-49df-ba8b-80061f0465a1 req-9a9d144d-972f-4dcd-add0-984e1344d56a service nova] Acquiring lock "6278d756-139c-4fcd-bf31-304c978d6682-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.116408] env[68674]: DEBUG oslo_concurrency.lockutils [req-f04ffbf9-bc57-49df-ba8b-80061f0465a1 req-9a9d144d-972f-4dcd-add0-984e1344d56a service nova] Lock "6278d756-139c-4fcd-bf31-304c978d6682-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.116408] env[68674]: DEBUG oslo_concurrency.lockutils [req-f04ffbf9-bc57-49df-ba8b-80061f0465a1 req-9a9d144d-972f-4dcd-add0-984e1344d56a service nova] Lock "6278d756-139c-4fcd-bf31-304c978d6682-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.116408] env[68674]: DEBUG nova.compute.manager [req-f04ffbf9-bc57-49df-ba8b-80061f0465a1 req-9a9d144d-972f-4dcd-add0-984e1344d56a service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] No waiting events found dispatching network-vif-plugged-ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.116408] env[68674]: WARNING nova.compute.manager [req-f04ffbf9-bc57-49df-ba8b-80061f0465a1 req-9a9d144d-972f-4dcd-add0-984e1344d56a service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Received unexpected event network-vif-plugged-ad353ee1-e0ca-436b-b58e-eae548257eed for instance with vm_state building and task_state block_device_mapping. 
[ 665.216029] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239667, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.244437] env[68674]: DEBUG nova.network.neutron [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Successfully updated port: ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.275464] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239668, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.367627] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.388017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.744s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.388017] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.393316] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.624s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.393316] env[68674]: DEBUG nova.objects.instance [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'resources' on Instance uuid 8790d635-fec5-4dcf-8cb0-220c2edec971 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 665.425818] env[68674]: INFO nova.compute.manager [-] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Took 1.03 seconds to deallocate network for instance. 
[ 665.720858] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239667, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612966} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.720858] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 23891bad-1b63-4237-9243-78954cf67d52/23891bad-1b63-4237-9243-78954cf67d52.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.720858] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.720858] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5e48ea8-5b21-4bd1-94de-238cd0f73f85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.726952] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 665.726952] env[68674]: value = "task-3239669" [ 665.726952] env[68674]: _type = "Task" [ 665.726952] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.732352] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239669, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.747085] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquiring lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.748548] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquired lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.748548] env[68674]: DEBUG nova.network.neutron [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.769650] env[68674]: DEBUG nova.network.neutron [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updated VIF entry in instance network info cache for port 55160236-eb1d-47d3-bca8-d3b46267c37f. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 665.770028] env[68674]: DEBUG nova.network.neutron [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.779515] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239668, 'name': ReconfigVM_Task, 'duration_secs': 0.594767} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.780384] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e/77fa5a89-961b-4c84-a75e-a5be0253677e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.780384] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe97b7cb-7157-49f2-9406-2d5fd71c7510 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.785919] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 665.785919] env[68674]: value = "task-3239670" [ 665.785919] env[68674]: _type = "Task" [ 665.785919] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.795424] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239670, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.900291] env[68674]: DEBUG nova.compute.utils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.901803] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 665.901803] env[68674]: DEBUG nova.network.neutron [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.935317] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.940264] env[68674]: DEBUG nova.policy [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1891413e35f845a2b761f474df3eb6c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dceab4b22c34737bc85ee5a5ded00d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 666.234398] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12716} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.236883] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 666.238380] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7079726c-77e1-45f6-8d67-992bf2350369 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.263249] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 23891bad-1b63-4237-9243-78954cf67d52/23891bad-1b63-4237-9243-78954cf67d52.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.264113] env[68674]: DEBUG nova.network.neutron [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Successfully created port: b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.267788] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a85a4b9c-165d-4b79-9f9c-bdd81af45c83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.282457] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd63cee8-31b9-49c1-98ed-52c975e88036 req-e0811dee-bce7-49aa-bef1-37c42fa36876 service nova] Releasing lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.290833] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 666.290833] env[68674]: value = "task-3239671" [ 666.290833] env[68674]: _type = "Task" [ 666.290833] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.296830] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239670, 'name': Rename_Task, 'duration_secs': 0.195563} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.297934] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.298121] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-233d85fd-aecb-4c60-9718-a6f3f3fddcc1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.302819] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239671, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.307205] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 666.307205] env[68674]: value = "task-3239672" [ 666.307205] env[68674]: _type = "Task" [ 666.307205] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.317015] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.341858] env[68674]: DEBUG nova.network.neutron [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.406223] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.515725] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 666.516301] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.516527] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.516682] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.516863] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.517013] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.517166] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.517371] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.517534] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 666.517696] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] 
Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.517861] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.518047] env[68674]: DEBUG nova.virt.hardware [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.519162] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564314ba-603c-4c79-be98-14e2fd4eb479 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.527391] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2e8966-a35e-42a4-af37-d485168ac7a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.650927] env[68674]: DEBUG nova.network.neutron [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updating instance_info_cache with network_info: [{"id": "ad353ee1-e0ca-436b-b58e-eae548257eed", "address": "fa:16:3e:d6:3d:f2", "network": {"id": "81c15154-8675-4a83-ac4f-bf4e5160c6a3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-935429665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e935d3cc470460cb5fa899412f4b0d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad353ee1-e0", "ovs_interfaceid": "ad353ee1-e0ca-436b-b58e-eae548257eed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.728523] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ce6d66-74da-43f1-b385-96fa16df52d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.738633] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e59f696-d386-416e-a471-9061b3ca8809 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.768606] env[68674]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134fc0d9-30c8-4a69-9789-36967b9dadd8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.776494] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc50d15c-00e9-481d-88fa-e90c90d7c196 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.790667] env[68674]: DEBUG nova.compute.provider_tree [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.801136] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239671, 'name': ReconfigVM_Task, 'duration_secs': 0.317589} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.801434] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 23891bad-1b63-4237-9243-78954cf67d52/23891bad-1b63-4237-9243-78954cf67d52.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.802328] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc6aadc4-c8e2-43ce-9489-f589452abad7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.810755] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 666.810755] env[68674]: value = "task-3239673" [ 666.810755] env[68674]: _type = "Task" [ 666.810755] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.825594] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239672, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.825895] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239673, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.139946] env[68674]: DEBUG nova.compute.manager [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Received event network-changed-ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.140191] env[68674]: DEBUG nova.compute.manager [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Refreshing instance network info cache due to event network-changed-ad353ee1-e0ca-436b-b58e-eae548257eed. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 667.140387] env[68674]: DEBUG oslo_concurrency.lockutils [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] Acquiring lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.153763] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Releasing lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.154086] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Instance network_info: |[{"id": "ad353ee1-e0ca-436b-b58e-eae548257eed", "address": "fa:16:3e:d6:3d:f2", "network": {"id": "81c15154-8675-4a83-ac4f-bf4e5160c6a3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-935429665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e935d3cc470460cb5fa899412f4b0d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad353ee1-e0", "ovs_interfaceid": "ad353ee1-e0ca-436b-b58e-eae548257eed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.154354] env[68674]: DEBUG oslo_concurrency.lockutils [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] Acquired lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.154528] env[68674]: DEBUG nova.network.neutron [req-108ebf8b-c15e-4912-b77e-d260ebf27889 
req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Refreshing network info cache for port ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.155725] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:3d:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad353ee1-e0ca-436b-b58e-eae548257eed', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.163258] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Creating folder: Project (7e935d3cc470460cb5fa899412f4b0d1). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.164349] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b635e3d-0df0-4e85-94ca-99465c1131fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.181214] env[68674]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 667.181311] env[68674]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68674) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 667.181828] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Folder already exists: Project (7e935d3cc470460cb5fa899412f4b0d1). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 667.182022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Creating folder: Instances. Parent ref: group-v647407. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.182248] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb8d9b59-692c-4188-b628-aea5acba336c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.192019] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Created folder: Instances in parent group-v647407. [ 667.192019] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.192158] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.192319] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87d942ca-d9df-4117-836c-21fa8cc84f05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.212504] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.212504] env[68674]: value = "task-3239676" [ 667.212504] env[68674]: _type = "Task" [ 667.212504] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.226599] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239676, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.296975] env[68674]: DEBUG nova.scheduler.client.report [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.326841] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239672, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.330711] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239673, 'name': Rename_Task, 'duration_secs': 0.183397} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.331364] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.331636] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38555d7a-d466-4f94-8a5a-acc83f61b9ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.337881] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 667.337881] env[68674]: value = "task-3239677" [ 667.337881] env[68674]: _type = "Task" [ 667.337881] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.350302] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239677, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.419159] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.446959] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.447303] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.447471] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.447655] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.447805] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.447956] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.448200] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.448362] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.448534] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Got 1 possible 
topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.448693] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.449303] env[68674]: DEBUG nova.virt.hardware [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.449739] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e2b13d-ee99-4acd-adbe-4abd60150bdf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.458643] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392fb815-9468-4c74-b942-f024eefad4b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.722036] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239676, 'name': CreateVM_Task, 'duration_secs': 0.495871} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.722221] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 667.722843] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'attachment_id': 'c57d2634-e154-45a2-9463-e37929a6d2a6', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647422', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'name': 'volume-6312e471-ebb6-489c-a453-cfed8d42b5ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6278d756-139c-4fcd-bf31-304c978d6682', 'attached_at': '', 'detached_at': '', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'serial': '6312e471-ebb6-489c-a453-cfed8d42b5ac'}, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=68674) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 667.723203] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Root volume attach. 
Driver type: vmdk {{(pid=68674) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 667.723884] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a94ee0-2f09-4e4b-9971-26bb5a5ac6fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.731247] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa1d820-103d-4e95-a0aa-8b4f44808d86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.737256] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2855d9-87dc-4664-90fc-400e4c7d574a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.743410] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-7691504b-7584-48f3-abcc-03728af63984 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.752356] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 667.752356] env[68674]: value = "task-3239678" [ 667.752356] env[68674]: _type = "Task" [ 667.752356] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.767315] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239678, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.802794] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.414s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.805327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.524s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.805607] env[68674]: DEBUG nova.objects.instance [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lazy-loading 'resources' on Instance uuid 60ded0c9-7e20-4071-b5ce-9189d8d01d5c {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 667.823913] env[68674]: DEBUG oslo_vmware.api [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239672, 'name': PowerOnVM_Task, 'duration_secs': 1.043835} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.824195] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.824391] env[68674]: INFO nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Took 9.69 seconds to spawn the instance on the hypervisor. [ 667.824563] env[68674]: DEBUG nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.825445] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301e362a-4ea6-4bac-badb-fcdecd83d3b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.843513] env[68674]: INFO nova.scheduler.client.report [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocations for instance 8790d635-fec5-4dcf-8cb0-220c2edec971 [ 667.851746] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239677, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.041628] env[68674]: DEBUG nova.network.neutron [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updated VIF entry in instance network info cache for port ad353ee1-e0ca-436b-b58e-eae548257eed. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.042072] env[68674]: DEBUG nova.network.neutron [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updating instance_info_cache with network_info: [{"id": "ad353ee1-e0ca-436b-b58e-eae548257eed", "address": "fa:16:3e:d6:3d:f2", "network": {"id": "81c15154-8675-4a83-ac4f-bf4e5160c6a3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-935429665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e935d3cc470460cb5fa899412f4b0d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad353ee1-e0", "ovs_interfaceid": "ad353ee1-e0ca-436b-b58e-eae548257eed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.203492] env[68674]: DEBUG nova.network.neutron [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Successfully updated port: b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 668.262907] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239678, 'name': RelocateVM_Task, 'duration_secs': 0.42771} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.263216] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 668.263418] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647422', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'name': 'volume-6312e471-ebb6-489c-a453-cfed8d42b5ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6278d756-139c-4fcd-bf31-304c978d6682', 'attached_at': '', 'detached_at': '', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'serial': '6312e471-ebb6-489c-a453-cfed8d42b5ac'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 668.264184] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfc9d82-37c2-4dc4-afa4-600ff55d4049 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.279404] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d8c763-cac7-4f42-8095-14ff93989a10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.303166] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] volume-6312e471-ebb6-489c-a453-cfed8d42b5ac/volume-6312e471-ebb6-489c-a453-cfed8d42b5ac.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.303539] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4775620-7214-4427-9af7-d4805caa11a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.326749] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 668.326749] env[68674]: value = "task-3239679" [ 668.326749] env[68674]: _type = "Task" [ 668.326749] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.336094] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239679, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.347037] env[68674]: INFO nova.compute.manager [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Took 50.19 seconds to build instance. 
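The recurring 'Acquiring lock "compute_resources"' / 'acquired ... waited' / '"released" ... held' DEBUG lines in this log come from oslo.concurrency's lockutils (the logged code path is lockutils.py:405/410/424), which the resource tracker uses to serialize claim and usage updates. Below is a minimal, hypothetical sketch of that locking pattern, not Nova's actual code: the function name update_usage is illustrative, and fair=True is an assumption about how the lock is taken.

from oslo_concurrency import lockutils

# Decorator form: one caller at a time per process enters the critical section;
# the lockutils wrapper is what logs how long each caller waited for and held
# the lock, as seen in the entries above.
@lockutils.synchronized('compute_resources', fair=True)
def update_usage():
    pass  # critical section: mutate per-host resource usage here

# Equivalent explicit form, usable for ad-hoc names such as the per-instance
# "refresh_cache-<uuid>" locks that also appear in this log:
with lockutils.lock('compute_resources', fair=True):
    pass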
[ 668.355953] env[68674]: DEBUG oslo_vmware.api [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3239677, 'name': PowerOnVM_Task, 'duration_secs': 0.553704} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.356446] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fbdb7c48-eeae-4e87-92c3-92e09b42a937 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "8790d635-fec5-4dcf-8cb0-220c2edec971" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.100s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.357601] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 668.357729] env[68674]: INFO nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Took 7.40 seconds to spawn the instance on the hypervisor. [ 668.357920] env[68674]: DEBUG nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.359784] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e800b6-50cd-47fa-8b4a-a0b24bae88e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.544343] env[68674]: DEBUG oslo_concurrency.lockutils [req-108ebf8b-c15e-4912-b77e-d260ebf27889 req-6a37e939-002b-433d-93d1-17fc900ca547 service nova] Releasing lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.705586] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "refresh_cache-d167585b-11f4-462c-b12e-c6a440c1476a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.705756] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "refresh_cache-d167585b-11f4-462c-b12e-c6a440c1476a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.705884] env[68674]: DEBUG nova.network.neutron [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
668.838277] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239679, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.840551] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32e7ace-e021-4a5f-b76c-9d97e1196f43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.847488] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86095877-2d22-4277-b870-476367784977 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.851401] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf6f85d9-47f5-4120-adfb-36df7be69761 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.144s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.888431] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e90c4b4-569d-4ca5-b289-fd6ba2a73aa9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.892867] env[68674]: INFO nova.compute.manager [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Took 48.71 seconds to build instance. [ 668.899044] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b202a9b1-60ff-4005-a642-a11f9bed1236 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.913096] env[68674]: DEBUG nova.compute.provider_tree [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.274458] env[68674]: DEBUG nova.network.neutron [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.323999] env[68674]: DEBUG nova.compute.manager [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Received event network-vif-plugged-b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.324231] env[68674]: DEBUG oslo_concurrency.lockutils [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] Acquiring lock "d167585b-11f4-462c-b12e-c6a440c1476a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.324447] env[68674]: DEBUG oslo_concurrency.lockutils [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] Lock "d167585b-11f4-462c-b12e-c6a440c1476a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.324698] env[68674]: DEBUG oslo_concurrency.lockutils [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] Lock "d167585b-11f4-462c-b12e-c6a440c1476a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.324772] env[68674]: DEBUG nova.compute.manager [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] No waiting events found dispatching network-vif-plugged-b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 669.324943] env[68674]: WARNING nova.compute.manager [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Received unexpected event network-vif-plugged-b42ed2ab-15a8-42e4-b62c-bb3188c711e0 for instance with vm_state building and task_state spawning. [ 669.325174] env[68674]: DEBUG nova.compute.manager [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Received event network-changed-b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.325269] env[68674]: DEBUG nova.compute.manager [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Refreshing instance network info cache due to event network-changed-b42ed2ab-15a8-42e4-b62c-bb3188c711e0. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.325444] env[68674]: DEBUG oslo_concurrency.lockutils [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] Acquiring lock "refresh_cache-d167585b-11f4-462c-b12e-c6a440c1476a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.339600] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239679, 'name': ReconfigVM_Task, 'duration_secs': 0.531829} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.339727] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Reconfigured VM instance instance-00000021 to attach disk [datastore2] volume-6312e471-ebb6-489c-a453-cfed8d42b5ac/volume-6312e471-ebb6-489c-a453-cfed8d42b5ac.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.345258] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e459ada-5d36-453e-b3e0-d8bf77b54962 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.356471] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.364228] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 669.364228] env[68674]: value = "task-3239680" [ 669.364228] env[68674]: _type = "Task" [ 669.364228] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.374121] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239680, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.394896] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ab441df-287f-4621-a9eb-38c918b3b59f tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "23891bad-1b63-4237-9243-78954cf67d52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.526s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.415922] env[68674]: DEBUG nova.scheduler.client.report [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.486649] env[68674]: DEBUG nova.network.neutron [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Updating instance_info_cache with network_info: [{"id": "b42ed2ab-15a8-42e4-b62c-bb3188c711e0", "address": "fa:16:3e:75:e9:0b", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42ed2ab-15", "ovs_interfaceid": "b42ed2ab-15a8-42e4-b62c-bb3188c711e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.881597] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239680, 'name': ReconfigVM_Task, 'duration_secs': 0.215863} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.882578] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.882883] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647422', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'name': 'volume-6312e471-ebb6-489c-a453-cfed8d42b5ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6278d756-139c-4fcd-bf31-304c978d6682', 'attached_at': '', 'detached_at': '', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'serial': '6312e471-ebb6-489c-a453-cfed8d42b5ac'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 669.883524] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4ea6631-3755-4931-9412-086ef8f69f03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.890466] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 669.890466] env[68674]: value = "task-3239681" [ 669.890466] env[68674]: _type = "Task" [ 669.890466] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.898660] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.901118] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239681, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.921427] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.924919] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.213s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.925342] env[68674]: DEBUG nova.objects.instance [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lazy-loading 'resources' on Instance uuid 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 669.946039] env[68674]: INFO nova.scheduler.client.report [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleted allocations for instance 60ded0c9-7e20-4071-b5ce-9189d8d01d5c [ 669.971198] env[68674]: DEBUG nova.compute.manager [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Received event network-changed-3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.971414] env[68674]: DEBUG nova.compute.manager [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Refreshing instance network info cache due to event network-changed-3660c8d4-d8be-4132-b92b-f96aa37b627b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.971518] env[68674]: DEBUG oslo_concurrency.lockutils [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] Acquiring lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.971644] env[68674]: DEBUG oslo_concurrency.lockutils [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] Acquired lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.971845] env[68674]: DEBUG nova.network.neutron [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Refreshing network info cache for port 3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.989677] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "refresh_cache-d167585b-11f4-462c-b12e-c6a440c1476a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.989943] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Instance network_info: |[{"id": "b42ed2ab-15a8-42e4-b62c-bb3188c711e0", "address": "fa:16:3e:75:e9:0b", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42ed2ab-15", "ovs_interfaceid": "b42ed2ab-15a8-42e4-b62c-bb3188c711e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 669.990273] env[68674]: DEBUG oslo_concurrency.lockutils [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] Acquired lock "refresh_cache-d167585b-11f4-462c-b12e-c6a440c1476a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.990456] env[68674]: DEBUG nova.network.neutron [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Refreshing network info cache for port 
b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.991859] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:e9:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b42ed2ab-15a8-42e4-b62c-bb3188c711e0', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 669.999853] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.001992] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.006220] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11211e15-c9d7-4c64-9700-d19398b6367d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.028236] env[68674]: DEBUG nova.compute.manager [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 670.033322] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.033322] env[68674]: value = "task-3239682" [ 670.033322] env[68674]: _type = "Task" [ 670.033322] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.042892] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239682, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.400898] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239681, 'name': Rename_Task, 'duration_secs': 0.133234} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.401176] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.401515] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37a50e49-813b-4b4d-897d-0fc06131b159 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.411706] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 670.411706] env[68674]: value = "task-3239683" [ 670.411706] env[68674]: _type = "Task" [ 670.411706] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.424315] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239683, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.425362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.458526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4a8a9700-17bb-4583-88e2-83c4f4fd4a2c tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "60ded0c9-7e20-4071-b5ce-9189d8d01d5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.399s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.553531] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239682, 'name': CreateVM_Task, 'duration_secs': 0.304755} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.554618] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.557266] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 670.558378] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.558602] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.558941] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 670.559212] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46128b59-0dc8-4c89-87fb-12c251e88c34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.564852] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 670.564852] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527da3ec-2388-b02d-fda8-3bb65d205d50" [ 670.564852] env[68674]: _type = "Task" [ 670.564852] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.575752] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527da3ec-2388-b02d-fda8-3bb65d205d50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.922560] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239683, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.075858] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527da3ec-2388-b02d-fda8-3bb65d205d50, 'name': SearchDatastore_Task, 'duration_secs': 0.011375} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.077077] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.077286] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.077522] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.077667] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.077854] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.078849] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c408e6e-98f9-4345-96bb-9a9b9c61afc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.081815] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-665c994b-d0ac-4cb5-952d-05c3a70a26d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.090119] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8445fa6-9d0d-4a48-b4bf-4303d7875231 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.094631] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 671.095258] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 671.097340] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcd9f77-8bee-49cd-88fb-cb9f81e6b87b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.138273] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa42113-b3a1-403a-a780-b919ed5dd35b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.142439] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 671.142439] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e88315-5c55-e910-03a4-11a511cb98e4" [ 671.142439] env[68674]: _type = "Task" [ 671.142439] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.152472] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6ec87e-b615-42a7-af14-5cd9ed665e69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.159366] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e88315-5c55-e910-03a4-11a511cb98e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.171030] env[68674]: DEBUG nova.compute.provider_tree [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.280158] env[68674]: DEBUG nova.network.neutron [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Updated VIF entry in instance network info cache for port b42ed2ab-15a8-42e4-b62c-bb3188c711e0. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.280660] env[68674]: DEBUG nova.network.neutron [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Updating instance_info_cache with network_info: [{"id": "b42ed2ab-15a8-42e4-b62c-bb3188c711e0", "address": "fa:16:3e:75:e9:0b", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42ed2ab-15", "ovs_interfaceid": "b42ed2ab-15a8-42e4-b62c-bb3188c711e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.289877] env[68674]: DEBUG nova.network.neutron [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updated VIF entry in instance network info cache for port 3660c8d4-d8be-4132-b92b-f96aa37b627b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.290233] env[68674]: DEBUG nova.network.neutron [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updating instance_info_cache with network_info: [{"id": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "address": "fa:16:3e:60:3b:77", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3660c8d4-d8", "ovs_interfaceid": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.423313] env[68674]: DEBUG oslo_vmware.api [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239683, 'name': PowerOnVM_Task, 'duration_secs': 0.717712} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.423530] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.423727] env[68674]: INFO nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Took 4.91 seconds to spawn the instance on the hypervisor. 
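Most of the oslo_vmware DEBUG traffic above ('Waiting for the task: (returnval){ value = "task-..." }', 'progress is N%', 'completed successfully') follows one invoke-then-poll pattern: a vSphere *_Task method is invoked through the API session, and wait_for_task() polls it until it finishes. A rough, hypothetical sketch of that pattern is shown here; power_on, session, and vm_ref are placeholders supplied by the caller, not code taken from Nova.

from oslo_vmware import api


def power_on(session: api.VMwareAPISession, vm_ref):
    # Start an asynchronous vSphere task on an already-authenticated session;
    # vm_ref is a VirtualMachine managed-object reference obtained elsewhere.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Block until the task reaches the 'success' state, raising on errors; the
    # periodic polling is what emits the _poll_task progress lines in this log.
    return session.wait_for_task(task)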
[ 671.423918] env[68674]: DEBUG nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.424738] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd92f82-b42d-446b-a7b3-0d2cbbe35fab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.654868] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e88315-5c55-e910-03a4-11a511cb98e4, 'name': SearchDatastore_Task, 'duration_secs': 0.021961} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.659628] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-648ccc68-256f-4c15-aa9f-2eb6fbba2712 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.664947] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 671.664947] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52524bba-fc2b-5ade-3699-8502b53eb79f" [ 671.664947] env[68674]: _type = "Task" [ 671.664947] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.675180] env[68674]: DEBUG nova.scheduler.client.report [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 671.678708] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52524bba-fc2b-5ade-3699-8502b53eb79f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.783698] env[68674]: DEBUG oslo_concurrency.lockutils [req-d1e022be-7819-4d6a-9dbc-f034278aaa06 req-78f8a4f2-3c8b-4b77-b9e1-9ab35860a33d service nova] Releasing lock "refresh_cache-d167585b-11f4-462c-b12e-c6a440c1476a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.795583] env[68674]: DEBUG oslo_concurrency.lockutils [req-573d31bc-7f9d-4266-863d-025c0adc9da9 req-9f58935f-83b4-4c37-a495-49fcaa16eb8f service nova] Releasing lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.941623] env[68674]: INFO nova.compute.manager [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Took 44.43 seconds to build instance. [ 672.176828] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52524bba-fc2b-5ade-3699-8502b53eb79f, 'name': SearchDatastore_Task, 'duration_secs': 0.012445} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.177111] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.177403] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d167585b-11f4-462c-b12e-c6a440c1476a/d167585b-11f4-462c-b12e-c6a440c1476a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 672.177628] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-334fdb38-0f1a-4aa1-9826-39865428704f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.180060] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.255s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.182255] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.247s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.182991] env[68674]: DEBUG nova.objects.instance [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lazy-loading 'resources' on Instance uuid f9168b78-ed64-4109-84f0-db0af61d2f10 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 672.189683] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 672.189683] env[68674]: value = "task-3239684" [ 672.189683] env[68674]: _type = "Task" [ 672.189683] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.199920] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239684, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.203142] env[68674]: INFO nova.scheduler.client.report [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleted allocations for instance 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b [ 672.445362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-972eb41d-bdf2-4fb4-be9c-4a4a3c588eac tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "6278d756-139c-4fcd-bf31-304c978d6682" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.619s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.703374] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239684, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.717914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17b897d9-07f5-4fee-8b0d-75ba33635ac6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.284s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.732249] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.732827] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.948165] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 673.200155] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.721326} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.202835] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d167585b-11f4-462c-b12e-c6a440c1476a/d167585b-11f4-462c-b12e-c6a440c1476a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.203072] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.204031] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3dbfab4c-a4c8-4a4f-9aeb-e8b6fa295ad7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.211291] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 673.211291] env[68674]: value = "task-3239685" [ 673.211291] env[68674]: _type = "Task" [ 673.211291] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.219206] env[68674]: DEBUG nova.compute.manager [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Received event network-changed-ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 673.219389] env[68674]: DEBUG nova.compute.manager [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Refreshing instance network info cache due to event network-changed-ad353ee1-e0ca-436b-b58e-eae548257eed. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 673.219621] env[68674]: DEBUG oslo_concurrency.lockutils [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] Acquiring lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.219771] env[68674]: DEBUG oslo_concurrency.lockutils [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] Acquired lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.219924] env[68674]: DEBUG nova.network.neutron [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Refreshing network info cache for port ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 673.228953] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.334497] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6222280e-651c-4f99-b6cf-b6787a571268 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.343738] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058bb43e-24a0-4003-ae37-0f8f8364c74d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.377135] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e165568f-c7fb-4039-85b7-96010bac4ae3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.384860] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e78a5f-5fc2-43f9-b151-a1f48af95395 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.399290] env[68674]: DEBUG nova.compute.provider_tree [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 673.471026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "160d9aa2-048d-45a2-ab55-581c8721ac3b" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.471298] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.471500] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "160d9aa2-048d-45a2-ab55-581c8721ac3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.471684] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.471878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.473979] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.474672] env[68674]: INFO nova.compute.manager [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Terminating instance [ 673.726149] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079243} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.726149] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.726149] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1640084-7e57-46a7-9eca-114990af6f46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.749999] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] d167585b-11f4-462c-b12e-c6a440c1476a/d167585b-11f4-462c-b12e-c6a440c1476a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.751038] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dc1a452-90f8-46b6-a361-65c01e0acda2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.771209] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 673.771209] env[68674]: value = "task-3239686" [ 673.771209] env[68674]: _type = "Task" [ 673.771209] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.780966] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239686, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.925256] env[68674]: ERROR nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] [req-38205c1d-135d-4ed2-8295-e6e877194852] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-38205c1d-135d-4ed2-8295-e6e877194852"}]} [ 673.943637] env[68674]: DEBUG nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 673.957213] env[68674]: DEBUG nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 673.957529] env[68674]: DEBUG nova.compute.provider_tree [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 673.974153] env[68674]: DEBUG nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 673.977789] env[68674]: DEBUG nova.compute.manager [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 673.977976] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 673.978918] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceeb03d7-3f21-43eb-9ee9-1a083e9ab6bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.986738] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 673.987485] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbdf9e08-a587-4ed1-bde1-74010e88cbbc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.992939] env[68674]: DEBUG oslo_vmware.api [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 673.992939] env[68674]: value = "task-3239687" [ 673.992939] env[68674]: _type = "Task" [ 673.992939] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.997260] env[68674]: DEBUG nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 674.003068] env[68674]: DEBUG oslo_vmware.api [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.003896] env[68674]: DEBUG nova.network.neutron [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updated VIF entry in instance network info cache for port ad353ee1-e0ca-436b-b58e-eae548257eed. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 674.004245] env[68674]: DEBUG nova.network.neutron [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updating instance_info_cache with network_info: [{"id": "ad353ee1-e0ca-436b-b58e-eae548257eed", "address": "fa:16:3e:d6:3d:f2", "network": {"id": "81c15154-8675-4a83-ac4f-bf4e5160c6a3", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-935429665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e935d3cc470460cb5fa899412f4b0d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad353ee1-e0", "ovs_interfaceid": "ad353ee1-e0ca-436b-b58e-eae548257eed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.284644] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239686, 'name': ReconfigVM_Task, 'duration_secs': 0.340469} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.287198] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Reconfigured VM instance instance-00000022 to attach disk [datastore2] d167585b-11f4-462c-b12e-c6a440c1476a/d167585b-11f4-462c-b12e-c6a440c1476a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.288029] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e513ed48-ac4e-4e59-9390-99e94c3a69d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.294437] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 674.294437] env[68674]: value = "task-3239688" [ 674.294437] env[68674]: _type = "Task" [ 674.294437] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.302589] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239688, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.504757] env[68674]: DEBUG oslo_vmware.api [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239687, 'name': PowerOffVM_Task, 'duration_secs': 0.474592} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.507331] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 674.507505] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 674.507963] env[68674]: DEBUG oslo_concurrency.lockutils [req-69736522-c014-4acb-83c6-e5210a12147e req-762332bb-75df-4fd9-a4b9-a0eb3385c8ee service nova] Releasing lock "refresh_cache-6278d756-139c-4fcd-bf31-304c978d6682" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.509035] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01e09867-42e3-4450-9fa5-cf5ae40bcb04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.541838] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df15097e-c5fb-4b1b-aa64-18dbeffcc3e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.548857] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2327de-bbad-4bc1-a574-866f68c40253 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.581375] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d08e678-fd65-4ec5-b4fc-65df68a03718 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.584037] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 674.584253] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 674.584431] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 
tempest-ListServerFiltersTestJSON-876714713-project-member] Deleting the datastore file [datastore2] 160d9aa2-048d-45a2-ab55-581c8721ac3b {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.584666] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23f6c097-4215-4164-b03d-e9ee7cc3cb68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.592635] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8b52d7-0054-4e5b-b4a4-8085213c0959 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.596360] env[68674]: DEBUG oslo_vmware.api [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 674.596360] env[68674]: value = "task-3239690" [ 674.596360] env[68674]: _type = "Task" [ 674.596360] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.607022] env[68674]: DEBUG nova.compute.provider_tree [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 674.612493] env[68674]: DEBUG oslo_vmware.api [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239690, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.803829] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239688, 'name': Rename_Task, 'duration_secs': 0.188543} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.804129] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.804375] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12d65760-4f5d-46d8-a1ff-037f1d2363f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.811239] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 674.811239] env[68674]: value = "task-3239691" [ 674.811239] env[68674]: _type = "Task" [ 674.811239] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.825616] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.106498] env[68674]: DEBUG oslo_vmware.api [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239690, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160605} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.106827] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.107086] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.107291] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.107469] env[68674]: INFO nova.compute.manager [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 675.107751] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.107960] env[68674]: DEBUG nova.compute.manager [-] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.108069] env[68674]: DEBUG nova.network.neutron [-] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.145815] env[68674]: DEBUG nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 675.145815] env[68674]: DEBUG nova.compute.provider_tree [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 63 to 64 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 675.146035] env[68674]: DEBUG nova.compute.provider_tree [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 675.323333] env[68674]: DEBUG oslo_vmware.api [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239691, 'name': PowerOnVM_Task, 'duration_secs': 0.47365} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.324079] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.325525] env[68674]: INFO nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Took 7.91 seconds to spawn the instance on the hypervisor. [ 675.325743] env[68674]: DEBUG nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.326587] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582182fd-add1-4346-971d-3b45b46f98b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.440705] env[68674]: DEBUG nova.compute.manager [req-08135dde-191b-40df-86a8-d4aad210822a req-59077710-3a2d-4c80-adc1-a49bf28c3d10 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Received event network-vif-deleted-7c0ea59c-e774-45af-b163-a886f32640b1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.440942] env[68674]: INFO nova.compute.manager [req-08135dde-191b-40df-86a8-d4aad210822a req-59077710-3a2d-4c80-adc1-a49bf28c3d10 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Neutron deleted interface 7c0ea59c-e774-45af-b163-a886f32640b1; detaching it from the instance and deleting it from the info cache [ 675.441187] env[68674]: DEBUG nova.network.neutron [req-08135dde-191b-40df-86a8-d4aad210822a req-59077710-3a2d-4c80-adc1-a49bf28c3d10 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.653678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.469s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.654522] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 38.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.654884] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.655431] env[68674]: DEBUG nova.compute.resource_tracker [None 
req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 675.655857] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.544s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.657607] env[68674]: INFO nova.compute.claims [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.661755] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbee920-84ec-4c08-8d15-efa4b69cddf8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.670762] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adf60d9-7cd3-446f-814f-49e42370be75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.685783] env[68674]: INFO nova.scheduler.client.report [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Deleted allocations for instance f9168b78-ed64-4109-84f0-db0af61d2f10 [ 675.687479] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce5ad11-6c82-4522-81a3-960739c7c1a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.697290] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0cb301-c282-40db-9d6f-b64094c50e48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.730534] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178641MB free_disk=119GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 675.731012] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.854199] env[68674]: INFO nova.compute.manager [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Took 46.36 seconds to build instance. 
[ 675.925238] env[68674]: DEBUG nova.network.neutron [-] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.943836] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f28940c-899a-4d9a-a876-5bc6fa6bb281 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.956250] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc11a67-fd12-493e-872b-a7cd3751138e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.992359] env[68674]: DEBUG nova.compute.manager [req-08135dde-191b-40df-86a8-d4aad210822a req-59077710-3a2d-4c80-adc1-a49bf28c3d10 service nova] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Detach interface failed, port_id=7c0ea59c-e774-45af-b163-a886f32640b1, reason: Instance 160d9aa2-048d-45a2-ab55-581c8721ac3b could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 676.196785] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f2b33e31-74bb-417b-8fed-78b63ebeb1de tempest-ServerDiagnosticsTest-373346071 tempest-ServerDiagnosticsTest-373346071-project-member] Lock "f9168b78-ed64-4109-84f0-db0af61d2f10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.923s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.355699] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fd934b0-69d2-438e-adfc-a9f82b241bd6 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "d167585b-11f4-462c-b12e-c6a440c1476a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.016s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.428337] env[68674]: INFO nova.compute.manager [-] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Took 1.32 seconds to deallocate network for instance. [ 676.568357] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061a7e6d-49db-4cd8-b2a6-eb18ff4052a1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.575239] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Suspending the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 676.575496] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-be91ce58-b584-4081-9c17-678f60c0ee6f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.582460] env[68674]: DEBUG oslo_vmware.api [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 676.582460] env[68674]: value = "task-3239692" [ 676.582460] env[68674]: _type = "Task" [ 676.582460] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.591031] env[68674]: DEBUG oslo_vmware.api [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239692, 'name': SuspendVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.860514] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 676.936633] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.098246] env[68674]: DEBUG oslo_vmware.api [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239692, 'name': SuspendVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.239442] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3bc7df-45b3-4dae-ab20-5f31cc558af3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.247157] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3805d5-3c21-403d-b454-004e6b143ba6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.278477] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4832b7f-5e18-4f84-acde-60eadcf1c6ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.288209] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c236b7b-3d09-4376-b48f-80d089a6950c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.304048] env[68674]: DEBUG nova.compute.provider_tree [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.389901] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.593197] env[68674]: DEBUG oslo_vmware.api [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 
tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239692, 'name': SuspendVM_Task, 'duration_secs': 0.628586} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.593388] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Suspended the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 677.593572] env[68674]: DEBUG nova.compute.manager [None req-3555bcd4-272a-40fc-bd45-f459ecc05468 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.594383] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadc6bd1-0af5-4fe2-891c-4374f601f2e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.808417] env[68674]: DEBUG nova.scheduler.client.report [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.315354] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.316104] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 678.323787] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.222s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.324291] env[68674]: INFO nova.compute.claims [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.823946] env[68674]: DEBUG nova.compute.utils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 678.825809] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 678.826439] env[68674]: DEBUG nova.network.neutron [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.878761] env[68674]: DEBUG nova.policy [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e12ae6d61f0147dbb760e5598d24a53e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc7acf9ab6ee4ce49cc6d971fa212411', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 679.329762] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 679.608554] env[68674]: DEBUG nova.network.neutron [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Successfully created port: 83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.038973] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26833358-461d-4fb0-bb67-ecb0b7a0c8f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.048350] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3881606-0bbf-499a-a380-8a25e35a5c84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.083316] env[68674]: DEBUG nova.compute.manager [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 680.085225] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efcd10f2-687e-40a8-8920-7cfac5027777 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.088285] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c735d70-487c-4c8b-a206-fabe0efc7f6f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.100990] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd6a4e5-ceb8-4bad-8417-f8280d6a324d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.115760] env[68674]: DEBUG nova.compute.provider_tree [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.356886] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 680.377368] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.377624] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.377780] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.377963] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.378124] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.378274] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.378481] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 680.378723] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.378866] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.379043] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.379221] env[68674]: DEBUG nova.virt.hardware [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.380087] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a8518f-daaa-4133-9d66-788d5767002c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.388019] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43740c1-c26e-494c-92a7-5c686c446af8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.602102] env[68674]: INFO nova.compute.manager [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] instance snapshotting [ 680.602203] env[68674]: WARNING nova.compute.manager [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 680.605016] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6631bdec-4ebe-4fe0-868d-3ec2bd9428be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.624830] env[68674]: DEBUG nova.scheduler.client.report [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.628892] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed648b42-f163-4761-b627-c190ce64c650 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
681.047255] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "f029042f-d80b-453e-adc9-1e65d7da7aaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.047255] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.132819] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.810s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.133386] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 681.136741] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.924s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.139049] env[68674]: INFO nova.compute.claims [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.144079] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 681.144079] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-74288e4a-67d4-4c60-a434-6b5d8c841eed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.159882] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 681.159882] env[68674]: value = "task-3239693" [ 681.159882] env[68674]: _type = "Task" [ 681.159882] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.179628] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239693, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.401330] env[68674]: DEBUG nova.network.neutron [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Successfully updated port: 83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.449318] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.449567] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.452782] env[68674]: DEBUG nova.compute.manager [req-6a8d261d-a799-4cce-ad6e-11829f2cd9ad req-6f28a058-6369-4ac0-b2b7-cd3355a97d77 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Received event network-vif-plugged-83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.453012] env[68674]: DEBUG oslo_concurrency.lockutils [req-6a8d261d-a799-4cce-ad6e-11829f2cd9ad req-6f28a058-6369-4ac0-b2b7-cd3355a97d77 service nova] Acquiring lock "0f618d12-dc7b-4739-8ace-9453a7175d75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.453276] env[68674]: DEBUG oslo_concurrency.lockutils [req-6a8d261d-a799-4cce-ad6e-11829f2cd9ad req-6f28a058-6369-4ac0-b2b7-cd3355a97d77 service nova] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.453455] env[68674]: DEBUG oslo_concurrency.lockutils [req-6a8d261d-a799-4cce-ad6e-11829f2cd9ad req-6f28a058-6369-4ac0-b2b7-cd3355a97d77 service nova] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.453580] env[68674]: DEBUG nova.compute.manager [req-6a8d261d-a799-4cce-ad6e-11829f2cd9ad req-6f28a058-6369-4ac0-b2b7-cd3355a97d77 service nova] [instance: 
0f618d12-dc7b-4739-8ace-9453a7175d75] No waiting events found dispatching network-vif-plugged-83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.453721] env[68674]: WARNING nova.compute.manager [req-6a8d261d-a799-4cce-ad6e-11829f2cd9ad req-6f28a058-6369-4ac0-b2b7-cd3355a97d77 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Received unexpected event network-vif-plugged-83560e44-ed5c-4f43-8a2c-483d1f512ea2 for instance with vm_state building and task_state spawning. [ 681.647425] env[68674]: DEBUG nova.compute.utils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 681.650231] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.650231] env[68674]: DEBUG nova.network.neutron [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.671254] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239693, 'name': CreateSnapshot_Task, 'duration_secs': 0.504895} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.671516] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 681.672268] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21582f6f-dafe-4643-82eb-23ee8a2e1332 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.702309] env[68674]: DEBUG nova.policy [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdc524ae6696428d9fff197d45f08506', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53d4f20e327a4322bd4df08c51c7a194', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.905456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.905456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.905456] env[68674]: DEBUG nova.network.neutron [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.057228] env[68674]: DEBUG nova.network.neutron [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Successfully created port: e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.154046] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 682.191275] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 682.191275] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2bcfb44f-ddba-42bc-abd7-ce2b8a4bab85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.206326] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 682.206326] env[68674]: value = "task-3239694" [ 682.206326] env[68674]: _type = "Task" [ 682.206326] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.218981] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239694, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.445379] env[68674]: DEBUG nova.network.neutron [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.612171] env[68674]: DEBUG nova.network.neutron [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updating instance_info_cache with network_info: [{"id": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "address": "fa:16:3e:d2:3e:07", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83560e44-ed", "ovs_interfaceid": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.712345] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e65b59-16d2-4a6b-81cb-4cd32d9ffb46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.720635] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239694, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.723236] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f155f4d-5294-40d2-bfc6-7d53a668f36d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.753318] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1805bad-f4d4-407c-b08a-5f078098acb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.760572] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c684740d-9d83-4714-8153-2c2877d1e989 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.773847] env[68674]: DEBUG nova.compute.provider_tree [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.115199] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Releasing lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.115694] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Instance network_info: |[{"id": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "address": "fa:16:3e:d2:3e:07", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83560e44-ed", "ovs_interfaceid": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 683.116147] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:3e:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbd7899c-c96e-47fc-9141-5803b646917a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83560e44-ed5c-4f43-8a2c-483d1f512ea2', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.123497] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.123706] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.123924] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d4696a4-2a6e-49ac-9d1a-5283e092b42a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.144014] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.144014] env[68674]: value = "task-3239695" [ 683.144014] env[68674]: _type = "Task" [ 683.144014] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.151725] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239695, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.163036] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 683.185533] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 683.185778] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.185936] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 683.186128] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.186278] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 683.186422] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 683.186630] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 683.186785] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 683.186951] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 683.187182] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 683.187295] env[68674]: DEBUG nova.virt.hardware [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 683.188224] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b060d02-1961-40ee-9680-b48219338632 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.195974] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c06a3b8-00ce-4e22-859a-e510fafa9904 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.218854] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239694, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.277148] env[68674]: DEBUG nova.scheduler.client.report [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.488392] env[68674]: DEBUG nova.compute.manager [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Received event network-changed-83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.489428] env[68674]: DEBUG nova.compute.manager [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Refreshing instance network info cache due to event network-changed-83560e44-ed5c-4f43-8a2c-483d1f512ea2. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 683.490259] env[68674]: DEBUG oslo_concurrency.lockutils [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] Acquiring lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.490568] env[68674]: DEBUG oslo_concurrency.lockutils [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] Acquired lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.490995] env[68674]: DEBUG nova.network.neutron [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Refreshing network info cache for port 83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.620624] env[68674]: DEBUG nova.network.neutron [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Successfully updated port: e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.655687] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239695, 'name': CreateVM_Task, 'duration_secs': 0.33248} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.655924] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.656924] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.657032] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.657381] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.657719] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c019ef1-e4fc-4403-a5db-e60fc2050816 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.662586] env[68674]: DEBUG 
oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 683.662586] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d39b16-a7c3-fc29-222e-5b521f0a32a7" [ 683.662586] env[68674]: _type = "Task" [ 683.662586] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.670553] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d39b16-a7c3-fc29-222e-5b521f0a32a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.720246] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239694, 'name': CloneVM_Task, 'duration_secs': 1.471047} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.720519] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Created linked-clone VM from snapshot [ 683.721260] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a971b3a-351a-49e4-bc51-8ee873430328 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.728185] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Uploading image 8b9e9b2f-2af9-43fc-934e-ac511e4a03eb {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 683.747185] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 683.747185] env[68674]: value = "vm-647483" [ 683.747185] env[68674]: _type = "VirtualMachine" [ 683.747185] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 683.747424] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f95679f1-d849-4c5d-94d0-a1b91ac5c70b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.753517] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease: (returnval){ [ 683.753517] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7dbaa-d7f7-b3aa-b14a-144fd1f8d50e" [ 683.753517] env[68674]: _type = "HttpNfcLease" [ 683.753517] env[68674]: } obtained for exporting VM: (result){ [ 683.753517] env[68674]: value = "vm-647483" [ 683.753517] env[68674]: _type = "VirtualMachine" [ 683.753517] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 683.753885] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the lease: (returnval){ [ 683.753885] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7dbaa-d7f7-b3aa-b14a-144fd1f8d50e" [ 683.753885] env[68674]: _type = "HttpNfcLease" [ 683.753885] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 683.759624] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 683.759624] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7dbaa-d7f7-b3aa-b14a-144fd1f8d50e" [ 683.759624] env[68674]: _type = "HttpNfcLease" [ 683.759624] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 683.782986] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.783533] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 683.786131] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.515s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.786369] env[68674]: DEBUG nova.objects.instance [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lazy-loading 'resources' on Instance uuid a62237a7-a123-4378-b655-d489ef08474b {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 684.124976] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "refresh_cache-3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.124976] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquired lock "refresh_cache-3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.124976] env[68674]: DEBUG nova.network.neutron [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.175094] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d39b16-a7c3-fc29-222e-5b521f0a32a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009428} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.175483] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.175623] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.175877] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.176025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.176254] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.176487] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a261020-f176-49f9-b6b2-766a65bb8a82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.187449] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.187848] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.188431] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68f77e2c-c150-4858-9bbb-fd832e0cc1e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.193098] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 684.193098] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52293f61-6744-d1b7-09b3-d1caf40576b4" [ 684.193098] env[68674]: _type = "Task" [ 684.193098] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.200283] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52293f61-6744-d1b7-09b3-d1caf40576b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.216149] env[68674]: DEBUG nova.network.neutron [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updated VIF entry in instance network info cache for port 83560e44-ed5c-4f43-8a2c-483d1f512ea2. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.216489] env[68674]: DEBUG nova.network.neutron [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updating instance_info_cache with network_info: [{"id": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "address": "fa:16:3e:d2:3e:07", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83560e44-ed", "ovs_interfaceid": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.264096] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 684.264096] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7dbaa-d7f7-b3aa-b14a-144fd1f8d50e" [ 684.264096] env[68674]: _type = "HttpNfcLease" [ 684.264096] env[68674]: } is 
ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 684.264096] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 684.264096] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7dbaa-d7f7-b3aa-b14a-144fd1f8d50e" [ 684.264096] env[68674]: _type = "HttpNfcLease" [ 684.264096] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 684.264808] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a07f6e-20bf-489b-974d-5f4337cc77d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.271972] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5289eedb-0457-423a-6d8b-2b7c42008d41/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 684.272245] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5289eedb-0457-423a-6d8b-2b7c42008d41/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 684.339078] env[68674]: DEBUG nova.compute.utils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 684.342374] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.342755] env[68674]: DEBUG nova.network.neutron [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.372649] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-de483568-10bb-4f64-bb96-94ef59b3fb91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.397989] env[68674]: DEBUG nova.policy [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'abfd3423bbdb4ee9a94b31fb0f7aa860', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fa7abd14180453bb12e9dd5fc24523f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.686930] env[68674]: DEBUG nova.network.neutron [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.704975] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52293f61-6744-d1b7-09b3-d1caf40576b4, 'name': SearchDatastore_Task, 'duration_secs': 0.008377} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.708509] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c85a19-3e7b-4138-9e1f-bba0bc369031 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.714069] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 684.714069] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aa6850-1771-2d09-3fe4-d1670dcdb1eb" [ 684.714069] env[68674]: _type = "Task" [ 684.714069] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.721094] env[68674]: DEBUG oslo_concurrency.lockutils [req-d74c0a05-17b2-4f1d-a5c5-9657cdd2681a req-400edc99-3084-4c12-95d4-83d30c70ad05 service nova] Releasing lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.727897] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aa6850-1771-2d09-3fe4-d1670dcdb1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009832} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.728298] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.728596] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 0f618d12-dc7b-4739-8ace-9453a7175d75/0f618d12-dc7b-4739-8ace-9453a7175d75.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.728900] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14f5a632-f18d-4aa1-a4f9-315c609f98a1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.739876] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 684.739876] env[68674]: value = "task-3239697" [ 684.739876] env[68674]: _type = "Task" [ 684.739876] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.748256] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239697, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.809867] env[68674]: DEBUG nova.network.neutron [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Successfully created port: 070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.843952] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 684.916655] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd520090-4201-4fec-b953-a5298ab9de84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.925784] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9d074d-7f78-4c99-a814-4ee57599ab98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.971425] env[68674]: DEBUG nova.network.neutron [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Updating instance_info_cache with network_info: [{"id": "e4fe497b-b574-433f-98da-10989ad09255", "address": "fa:16:3e:c8:6f:17", "network": {"id": "bf6a7e9c-de61-4657-889e-ed1e2b6157d4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-420712588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d4f20e327a4322bd4df08c51c7a194", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4fe497b-b5", "ovs_interfaceid": "e4fe497b-b574-433f-98da-10989ad09255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.973586] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b70c522-fcc2-4119-b57f-875d3098d183 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.982989] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3269369f-beda-4a44-bcc5-e6f1cbfbcb9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
685.005497] env[68674]: DEBUG nova.compute.provider_tree [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.250232] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239697, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.477783] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Releasing lock "refresh_cache-3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.477871] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Instance network_info: |[{"id": "e4fe497b-b574-433f-98da-10989ad09255", "address": "fa:16:3e:c8:6f:17", "network": {"id": "bf6a7e9c-de61-4657-889e-ed1e2b6157d4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-420712588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d4f20e327a4322bd4df08c51c7a194", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4fe497b-b5", "ovs_interfaceid": "e4fe497b-b574-433f-98da-10989ad09255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.478274] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:6f:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8233b50c-be21-441a-a357-a29138a95b8b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4fe497b-b574-433f-98da-10989ad09255', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.487276] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 
tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Creating folder: Project (53d4f20e327a4322bd4df08c51c7a194). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.487511] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c39472ff-0c54-45b7-9822-170084135648 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.500033] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Created folder: Project (53d4f20e327a4322bd4df08c51c7a194) in parent group-v647377. [ 685.500282] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Creating folder: Instances. Parent ref: group-v647485. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.501069] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2258af9b-3541-488c-82f6-3c6c06e6b9d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.508795] env[68674]: DEBUG nova.scheduler.client.report [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 685.514728] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Created folder: Instances in parent group-v647485. [ 685.515118] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.516125] env[68674]: DEBUG nova.compute.manager [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Received event network-vif-plugged-e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.517126] env[68674]: DEBUG oslo_concurrency.lockutils [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] Acquiring lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.517126] env[68674]: DEBUG oslo_concurrency.lockutils [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.517126] env[68674]: DEBUG oslo_concurrency.lockutils [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.517126] env[68674]: DEBUG nova.compute.manager [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] No waiting events found dispatching network-vif-plugged-e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 685.517285] env[68674]: WARNING nova.compute.manager [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Received unexpected event network-vif-plugged-e4fe497b-b574-433f-98da-10989ad09255 for instance with vm_state building and task_state spawning. [ 685.517850] env[68674]: DEBUG nova.compute.manager [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Received event network-changed-e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.517850] env[68674]: DEBUG nova.compute.manager [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Refreshing instance network info cache due to event network-changed-e4fe497b-b574-433f-98da-10989ad09255. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 685.517850] env[68674]: DEBUG oslo_concurrency.lockutils [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] Acquiring lock "refresh_cache-3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.517984] env[68674]: DEBUG oslo_concurrency.lockutils [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] Acquired lock "refresh_cache-3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.518161] env[68674]: DEBUG nova.network.neutron [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Refreshing network info cache for port e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.519344] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.520709] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a68189b-3496-4a48-a9b9-8e5ed287e4c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.542016] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.542016] env[68674]: value = "task-3239700" [ 685.542016] env[68674]: _type = "Task" [ 685.542016] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.550791] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239700, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.751343] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239697, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551528} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.751593] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 0f618d12-dc7b-4739-8ace-9453a7175d75/0f618d12-dc7b-4739-8ace-9453a7175d75.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.751731] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.752112] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63e3cae7-852d-4ff8-a5a4-f09447d2fd2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.758953] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 685.758953] env[68674]: value = "task-3239701" [ 685.758953] env[68674]: _type = "Task" [ 685.758953] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.767308] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.856130] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 685.889068] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 685.889068] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.889068] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 685.889363] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.889363] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 685.889363] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 685.889363] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 685.893091] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 685.893739] env[68674]: DEBUG nova.virt.hardware [None 
req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 685.894205] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 685.894568] env[68674]: DEBUG nova.virt.hardware [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 685.895834] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e01618-9f5f-47b5-ab56-b0234a3d0472 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.906599] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7dbcd7-63d3-433a-84b3-6901cd607e56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.014066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.016038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.572s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.017666] env[68674]: INFO nova.compute.claims [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.037743] env[68674]: INFO nova.scheduler.client.report [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleted allocations for instance a62237a7-a123-4378-b655-d489ef08474b [ 686.052997] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239700, 'name': CreateVM_Task, 'duration_secs': 0.420394} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.053701] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.054111] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.054235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.054566] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.055116] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9c840fc-54a6-4cff-b211-671f6c95d864 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.060395] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 686.060395] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5210023c-d19d-230c-43b7-2af4fe95c61c" [ 686.060395] env[68674]: _type = "Task" [ 686.060395] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.069193] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5210023c-d19d-230c-43b7-2af4fe95c61c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.269564] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073789} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.269823] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.271429] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd193a59-369e-45fd-aacb-c3d543cc1a6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.298229] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 0f618d12-dc7b-4739-8ace-9453a7175d75/0f618d12-dc7b-4739-8ace-9453a7175d75.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.298613] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4106dedd-7a06-4bc8-be7c-08479cb06d2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.319299] env[68674]: DEBUG nova.network.neutron [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Updated VIF entry in instance network info cache for port e4fe497b-b574-433f-98da-10989ad09255. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.319758] env[68674]: DEBUG nova.network.neutron [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Updating instance_info_cache with network_info: [{"id": "e4fe497b-b574-433f-98da-10989ad09255", "address": "fa:16:3e:c8:6f:17", "network": {"id": "bf6a7e9c-de61-4657-889e-ed1e2b6157d4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-420712588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d4f20e327a4322bd4df08c51c7a194", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4fe497b-b5", "ovs_interfaceid": "e4fe497b-b574-433f-98da-10989ad09255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.328218] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 686.328218] env[68674]: value = "task-3239702" [ 686.328218] env[68674]: _type = "Task" [ 686.328218] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.338440] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239702, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.550676] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d68cd372-617a-438a-bad8-72dff6afbef9 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "a62237a7-a123-4378-b655-d489ef08474b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.810s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.562093] env[68674]: DEBUG nova.network.neutron [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Successfully updated port: 070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.576361] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5210023c-d19d-230c-43b7-2af4fe95c61c, 'name': SearchDatastore_Task, 'duration_secs': 0.025924} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.577943] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.578510] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.578902] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.579758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.579758] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} 
[ 686.580197] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49b8a72f-9737-4299-ae94-284ee7e3d17c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.590039] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.590128] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.590964] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faf7fb62-1f0d-40a7-a299-261ff6f2c770 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.597031] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 686.597031] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52eb0d01-6853-6e10-f27c-e5117ca008b5" [ 686.597031] env[68674]: _type = "Task" [ 686.597031] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.605581] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52eb0d01-6853-6e10-f27c-e5117ca008b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.823938] env[68674]: DEBUG oslo_concurrency.lockutils [req-83d7a069-9b39-4d9f-8e57-1b29d868db00 req-a66fb386-5be5-4760-9bf5-a846891614a1 service nova] Releasing lock "refresh_cache-3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.838321] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239702, 'name': ReconfigVM_Task, 'duration_secs': 0.349097} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.838612] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 0f618d12-dc7b-4739-8ace-9453a7175d75/0f618d12-dc7b-4739-8ace-9453a7175d75.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.839278] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad8dd87c-37a9-4105-9d3d-1e006e5ac564 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.847114] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 686.847114] env[68674]: value = "task-3239703" [ 686.847114] env[68674]: _type = "Task" [ 686.847114] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.855491] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239703, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.069717] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.069843] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.069930] env[68674]: DEBUG nova.network.neutron [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.114834] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52eb0d01-6853-6e10-f27c-e5117ca008b5, 'name': SearchDatastore_Task, 'duration_secs': 0.010962} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.115171] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58cd0b3a-004c-49c4-af1e-841ea165dc84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.123627] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 687.123627] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e5fc07-57cb-e024-1a6d-72eaac57a7bd" [ 687.123627] env[68674]: _type = "Task" [ 687.123627] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.132805] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e5fc07-57cb-e024-1a6d-72eaac57a7bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.359107] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239703, 'name': Rename_Task, 'duration_secs': 0.161973} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.359107] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.359107] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f0ceefb-df39-4ad7-a6b4-23b169de6aef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.370178] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 687.370178] env[68674]: value = "task-3239704" [ 687.370178] env[68674]: _type = "Task" [ 687.370178] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.380389] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239704, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.589630] env[68674]: DEBUG nova.compute.manager [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Received event network-vif-plugged-070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 687.589939] env[68674]: DEBUG oslo_concurrency.lockutils [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] Acquiring lock "505b0352-39ab-4841-8766-14626af2b13e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.590149] env[68674]: DEBUG oslo_concurrency.lockutils [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] Lock "505b0352-39ab-4841-8766-14626af2b13e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.590321] env[68674]: DEBUG oslo_concurrency.lockutils [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] Lock "505b0352-39ab-4841-8766-14626af2b13e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.590485] env[68674]: DEBUG nova.compute.manager [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] No waiting events found dispatching network-vif-plugged-070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.590675] env[68674]: WARNING nova.compute.manager [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Received unexpected event network-vif-plugged-070f055c-efb5-4c7e-ba62-e44b000f2eeb for instance with vm_state building and task_state spawning. [ 687.590859] env[68674]: DEBUG nova.compute.manager [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Received event network-changed-070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 687.591047] env[68674]: DEBUG nova.compute.manager [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Refreshing instance network info cache due to event network-changed-070f055c-efb5-4c7e-ba62-e44b000f2eeb. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 687.591255] env[68674]: DEBUG oslo_concurrency.lockutils [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] Acquiring lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.605613] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262d7ff9-f43b-46b1-8958-6e1be082085b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.614137] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc9e11e-4b56-4091-b759-5a16a26d9579 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.618580] env[68674]: DEBUG nova.network.neutron [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.656928] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31501088-2d81-4191-b635-fdfc6152be18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.659721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.659882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.660093] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.660269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.660433] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.662484] env[68674]: INFO nova.compute.manager [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Terminating instance [ 687.669194] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e5fc07-57cb-e024-1a6d-72eaac57a7bd, 'name': SearchDatastore_Task, 'duration_secs': 0.013301} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.671412] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.671684] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8/3b0837ef-53fb-4851-b69f-ee0a1d89fbf8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.672015] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df83da61-04d2-43b6-b53d-85ce5381dd1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.674900] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64532310-a170-4554-9c11-43348b634d44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.691451] env[68674]: DEBUG nova.compute.provider_tree [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.698021] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 687.698021] env[68674]: value = "task-3239705" [ 687.698021] env[68674]: _type = "Task" [ 687.698021] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.710038] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239705, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.753393] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "275cdfcc-06f0-4c29-b18b-55cde38480a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.753682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.753937] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "275cdfcc-06f0-4c29-b18b-55cde38480a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.754115] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.754307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.757465] env[68674]: INFO nova.compute.manager [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Terminating instance [ 687.861033] env[68674]: DEBUG nova.network.neutron [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Updating instance_info_cache with network_info: [{"id": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "address": "fa:16:3e:41:4a:0c", "network": {"id": 
"d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap070f055c-ef", "ovs_interfaceid": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.880657] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239704, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.167392] env[68674]: DEBUG nova.compute.manager [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 688.167713] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.168718] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec516a3-b3e5-440e-bc31-e8dac5b814b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.177848] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 688.178164] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6882cefe-bf24-462f-9391-5b533c8ac2e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.185263] env[68674]: DEBUG oslo_vmware.api [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 688.185263] env[68674]: value = "task-3239706" [ 688.185263] env[68674]: _type = "Task" [ 688.185263] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.195886] env[68674]: DEBUG oslo_vmware.api [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.198158] env[68674]: DEBUG nova.scheduler.client.report [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.213172] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239705, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.261223] env[68674]: DEBUG nova.compute.manager [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 688.261512] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.262573] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f945ee0f-4ebf-4c57-8fee-8deb0606b25b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.271223] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 688.271544] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-366f4a99-831a-4b50-8a62-814ea8a9b6c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.277979] env[68674]: DEBUG oslo_vmware.api [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 688.277979] env[68674]: value = "task-3239707" [ 688.277979] env[68674]: _type = "Task" [ 688.277979] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.288249] env[68674]: DEBUG oslo_vmware.api [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239707, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.366396] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.366829] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Instance network_info: |[{"id": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "address": "fa:16:3e:41:4a:0c", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap070f055c-ef", "ovs_interfaceid": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 688.367288] env[68674]: DEBUG oslo_concurrency.lockutils [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] Acquired lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.367365] env[68674]: DEBUG nova.network.neutron [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Refreshing network info cache for port 070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 688.368740] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:4a:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '070f055c-efb5-4c7e-ba62-e44b000f2eeb', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.377386] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 
tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.381810] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 688.385697] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8f52f01-b276-4b96-a82e-ca8e53502ef7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.409014] env[68674]: DEBUG oslo_vmware.api [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239704, 'name': PowerOnVM_Task, 'duration_secs': 0.525829} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.410486] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 688.410777] env[68674]: INFO nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Took 8.05 seconds to spawn the instance on the hypervisor. [ 688.411016] env[68674]: DEBUG nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 688.411284] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.411284] env[68674]: value = "task-3239708" [ 688.411284] env[68674]: _type = "Task" [ 688.411284] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.412058] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f97e018-01d9-4739-a48a-23a12f87ac6d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.427720] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239708, 'name': CreateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.686518] env[68674]: DEBUG nova.network.neutron [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Updated VIF entry in instance network info cache for port 070f055c-efb5-4c7e-ba62-e44b000f2eeb. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.686896] env[68674]: DEBUG nova.network.neutron [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Updating instance_info_cache with network_info: [{"id": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "address": "fa:16:3e:41:4a:0c", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap070f055c-ef", "ovs_interfaceid": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.696848] env[68674]: DEBUG oslo_vmware.api [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239706, 'name': PowerOffVM_Task, 'duration_secs': 0.219299} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.697120] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.697289] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.697747] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e40debc-90f4-4ba1-9215-61859422dc5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.711549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.712129] env[68674]: DEBUG nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 688.722306] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.867s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.722772] env[68674]: INFO nova.compute.claims [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.725709] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586756} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.726460] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8/3b0837ef-53fb-4851-b69f-ee0a1d89fbf8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.726671] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.726922] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-081a2579-b18d-4e7d-94f3-85e6716eec5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.734055] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 688.734055] env[68674]: value = "task-3239710" [ 688.734055] env[68674]: _type = "Task" [ 688.734055] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.743412] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239710, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.770020] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.773631] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.773631] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleting the datastore file [datastore1] b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.773631] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bebc53d6-e639-4978-bf7f-b98601479a5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.780036] env[68674]: DEBUG oslo_vmware.api [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 688.780036] env[68674]: value = "task-3239711" [ 688.780036] env[68674]: _type = "Task" [ 688.780036] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.795902] env[68674]: DEBUG oslo_vmware.api [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239707, 'name': PowerOffVM_Task, 'duration_secs': 0.298065} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.796215] env[68674]: DEBUG oslo_vmware.api [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239711, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.797204] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.797393] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.797683] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ff2bd06-2087-427a-ba45-592d5e65d2c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.868467] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.868830] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.869090] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleting the datastore file [datastore1] 275cdfcc-06f0-4c29-b18b-55cde38480a3 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.869415] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f121e7be-8248-40e9-a3c9-386ea8267f53 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.877547] env[68674]: DEBUG oslo_vmware.api [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for the task: (returnval){ [ 688.877547] env[68674]: value = "task-3239713" [ 688.877547] env[68674]: _type = "Task" [ 688.877547] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.887692] env[68674]: DEBUG oslo_vmware.api [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239713, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.939609] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239708, 'name': CreateVM_Task, 'duration_secs': 0.37556} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.941914] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.942458] env[68674]: INFO nova.compute.manager [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Took 48.85 seconds to build instance. [ 688.943794] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.944194] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.944318] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.944746] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72387b3d-90ce-4086-a2ba-b43288a7ae56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.950352] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 688.950352] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae845e-9132-1ebe-7495-620e47427525" [ 688.950352] env[68674]: _type = "Task" [ 688.950352] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.960647] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae845e-9132-1ebe-7495-620e47427525, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.192412] env[68674]: DEBUG oslo_concurrency.lockutils [req-21820ab1-7602-4db8-b958-8b9d1af7d3e7 req-020b5b8a-f1bc-4ffc-bd28-65d0704c5424 service nova] Releasing lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.217120] env[68674]: DEBUG nova.compute.utils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.221029] env[68674]: DEBUG nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 689.246168] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083778} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.246473] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.247397] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab26a6c-ecc6-4e28-9ae3-ba54afe67eda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.272393] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8/3b0837ef-53fb-4851-b69f-ee0a1d89fbf8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.272719] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-318af679-c439-4096-b172-c3b64f91f93e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.297486] env[68674]: DEBUG oslo_vmware.api [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210382} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.298979] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.299283] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 689.299544] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 689.299728] env[68674]: INFO nova.compute.manager [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Took 1.13 seconds to destroy the instance on the hypervisor. [ 689.299986] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.300304] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 689.300304] env[68674]: value = "task-3239714" [ 689.300304] env[68674]: _type = "Task" [ 689.300304] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.300514] env[68674]: DEBUG nova.compute.manager [-] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 689.300624] env[68674]: DEBUG nova.network.neutron [-] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.311175] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239714, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.387773] env[68674]: DEBUG oslo_vmware.api [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Task: {'id': task-3239713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2435} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.388064] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.388290] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 689.388500] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 689.388931] env[68674]: INFO nova.compute.manager [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 689.388999] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.389231] env[68674]: DEBUG nova.compute.manager [-] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 689.389577] env[68674]: DEBUG nova.network.neutron [-] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.445330] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5f82bbb8-2382-4bb4-b3fd-17e7327903bc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.002s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.461864] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae845e-9132-1ebe-7495-620e47427525, 'name': SearchDatastore_Task, 'duration_secs': 0.013474} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.463209] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.463564] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.463823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.463979] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.464167] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
689.464666] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fde542c-f0e0-4fc3-a9a9-4392138bf4ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.474059] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 689.474257] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 689.475013] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d821ca6d-8221-4784-9e04-dfe2108c89f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.480567] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 689.480567] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201bb04-0a32-c469-5348-f0166252308a" [ 689.480567] env[68674]: _type = "Task" [ 689.480567] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.489026] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201bb04-0a32-c469-5348-f0166252308a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.722846] env[68674]: DEBUG nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 689.814283] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239714, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.947865] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.992451] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201bb04-0a32-c469-5348-f0166252308a, 'name': SearchDatastore_Task, 'duration_secs': 0.01087} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.996195] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6609a236-2f87-494d-9451-417d04f14eb9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.002441] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 690.002441] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5108e-49dd-b620-9b31-e96f4961d4b1" [ 690.002441] env[68674]: _type = "Task" [ 690.002441] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.011643] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5108e-49dd-b620-9b31-e96f4961d4b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.319035] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239714, 'name': ReconfigVM_Task, 'duration_secs': 0.526} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.319522] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8/3b0837ef-53fb-4851-b69f-ee0a1d89fbf8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.320839] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f75b01b-4b20-4898-8382-46c2986000cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.327641] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 690.327641] env[68674]: value = "task-3239715" [ 690.327641] env[68674]: _type = "Task" [ 690.327641] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.335987] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239715, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.370910] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1344ca6-04f7-4643-9898-88eea3b8e74f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.379278] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9225a674-2276-43dc-a9a3-1d9a93c1793e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.410461] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7546a9e-925f-4fd2-88e1-8e4d06e72a06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.419088] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d75f3d-4f8e-46fa-954f-ac5b36a51a59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.434661] env[68674]: DEBUG nova.compute.provider_tree [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 690.476012] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.515227] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5108e-49dd-b620-9b31-e96f4961d4b1, 'name': SearchDatastore_Task, 'duration_secs': 0.011236} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.515536] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.515833] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 505b0352-39ab-4841-8766-14626af2b13e/505b0352-39ab-4841-8766-14626af2b13e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 690.516589] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29e581f0-ba98-4d38-8ba8-4e00294389be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.523469] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 690.523469] env[68674]: value = "task-3239716" [ 690.523469] env[68674]: _type = "Task" [ 690.523469] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.531753] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239716, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.542469] env[68674]: DEBUG nova.compute.manager [req-ba3d6194-4fdc-4817-9884-0f2a14e5b921 req-72a80e68-8c70-4b52-8e90-72f631578441 service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Received event network-vif-deleted-04dca9ad-f56b-402c-b76b-3c4ecda2e500 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 690.542469] env[68674]: INFO nova.compute.manager [req-ba3d6194-4fdc-4817-9884-0f2a14e5b921 req-72a80e68-8c70-4b52-8e90-72f631578441 service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Neutron deleted interface 04dca9ad-f56b-402c-b76b-3c4ecda2e500; detaching it from the instance and deleting it from the info cache [ 690.542469] env[68674]: DEBUG nova.network.neutron [req-ba3d6194-4fdc-4817-9884-0f2a14e5b921 req-72a80e68-8c70-4b52-8e90-72f631578441 service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.563442] env[68674]: DEBUG nova.network.neutron [-] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.588126] env[68674]: DEBUG nova.network.neutron [-] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.736736] env[68674]: DEBUG nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 690.770980] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 690.771325] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.771501] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 690.771696] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.771847] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 690.774109] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 690.774456] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 690.775100] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 690.775172] env[68674]: DEBUG 
nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 690.775310] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 690.775488] env[68674]: DEBUG nova.virt.hardware [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 690.776462] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02be13a9-2773-42e4-bb72-b1bb2e41ed3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.787970] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8385d43d-156a-4c33-b761-9bdb217f39d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.804172] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.810486] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Creating folder: Project (967d085f44854f11ae9c37ddbbe29969). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.810974] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6ca7470-7ba5-4f42-9eb7-6f0ac666b516 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.821863] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Created folder: Project (967d085f44854f11ae9c37ddbbe29969) in parent group-v647377. [ 690.822184] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Creating folder: Instances. Parent ref: group-v647489. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.822478] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-290283ca-3468-4166-bc5f-847907d58418 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.834578] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Created folder: Instances in parent group-v647489. [ 690.834846] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 690.835821] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.835821] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cedd7bb9-ef73-4b0a-863f-7aa78eafc824 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.853092] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239715, 'name': Rename_Task, 'duration_secs': 0.165508} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.854022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.854351] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3417a7e0-60f2-4e2e-a441-f47bca84d44b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.857662] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.857662] env[68674]: value = "task-3239719" [ 690.857662] env[68674]: _type = "Task" [ 690.857662] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.864727] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 690.864727] env[68674]: value = "task-3239720" [ 690.864727] env[68674]: _type = "Task" [ 690.864727] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.868502] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239719, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.877916] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239720, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.958395] env[68674]: ERROR nova.scheduler.client.report [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [req-51309e8a-c6b8-48fe-983a-37a6d2bf4a19] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-51309e8a-c6b8-48fe-983a-37a6d2bf4a19"}]} [ 690.976919] env[68674]: DEBUG nova.scheduler.client.report [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 690.992745] env[68674]: DEBUG nova.scheduler.client.report [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 690.992968] env[68674]: DEBUG nova.compute.provider_tree [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.009170] env[68674]: DEBUG nova.scheduler.client.report [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 
tempest-ServerActionsV293TestJSON-1881910272-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 691.030477] env[68674]: DEBUG nova.scheduler.client.report [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 691.036607] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239716, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.045460] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-470f7d2b-3a5f-43b6-9860-401baf09439c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.055703] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb660a2-5ce9-4956-b9dd-bba015fe8c3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.072421] env[68674]: INFO nova.compute.manager [-] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Took 1.68 seconds to deallocate network for instance. [ 691.099474] env[68674]: INFO nova.compute.manager [-] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Took 1.80 seconds to deallocate network for instance. [ 691.099968] env[68674]: DEBUG nova.compute.manager [req-ba3d6194-4fdc-4817-9884-0f2a14e5b921 req-72a80e68-8c70-4b52-8e90-72f631578441 service nova] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Detach interface failed, port_id=04dca9ad-f56b-402c-b76b-3c4ecda2e500, reason: Instance 275cdfcc-06f0-4c29-b18b-55cde38480a3 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 691.371899] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239719, 'name': CreateVM_Task, 'duration_secs': 0.329803} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.379098] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.379774] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.379935] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.380588] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 691.380941] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d4e361a-a5a4-463c-9567-20d9318c0363 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.386033] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239720, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.390483] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 691.390483] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0f263-5261-360d-e3ac-3468d129a3e7" [ 691.390483] env[68674]: _type = "Task" [ 691.390483] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.396864] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0f263-5261-360d-e3ac-3468d129a3e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.537432] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527427} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.537605] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 505b0352-39ab-4841-8766-14626af2b13e/505b0352-39ab-4841-8766-14626af2b13e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.537689] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.537936] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf2b4ec6-a0cb-4065-b6c8-576e888b3c1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.545976] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 691.545976] env[68674]: value = "task-3239721" [ 691.545976] env[68674]: _type = "Task" [ 691.545976] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.554974] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239721, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.581403] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.611122] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.658549] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82824443-cb7f-46bf-89f1-61152982f8f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.666355] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325fc7fe-3b75-485c-905a-ffc06b1651c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.699489] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc1412e-bee9-4d37-81e0-add16522e23a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.707764] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd878e1-c0d9-438f-8749-4ed2c46257b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.722912] env[68674]: DEBUG nova.compute.provider_tree [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.884154] env[68674]: DEBUG oslo_vmware.api [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239720, 'name': PowerOnVM_Task, 'duration_secs': 0.562459} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.886297] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.886545] env[68674]: INFO nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Took 8.72 seconds to spawn the instance on the hypervisor. 
[ 691.886726] env[68674]: DEBUG nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.887526] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5c5e85-380d-4e02-9914-1fc4f161059b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.901051] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0f263-5261-360d-e3ac-3468d129a3e7, 'name': SearchDatastore_Task, 'duration_secs': 0.012755} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.905515] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.905515] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 691.905515] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.905515] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.905796] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 691.906594] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b768a42-4b64-4803-a4c2-88a582423bb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.917952] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce 
tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 691.918160] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 691.918872] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-443e82ef-f9ec-4973-8f8f-3b55342639b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.923707] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 691.923707] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f60333-63c4-b333-b29c-4df448fa4a32" [ 691.923707] env[68674]: _type = "Task" [ 691.923707] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.932326] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f60333-63c4-b333-b29c-4df448fa4a32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.952020] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5289eedb-0457-423a-6d8b-2b7c42008d41/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 691.953884] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26abb485-3431-4174-be20-4d7be6d52093 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.961425] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5289eedb-0457-423a-6d8b-2b7c42008d41/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 691.961678] env[68674]: ERROR oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5289eedb-0457-423a-6d8b-2b7c42008d41/disk-0.vmdk due to incomplete transfer. 
[ 691.962075] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-58c9b800-8774-43dd-8553-8d88a9207d55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.968725] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5289eedb-0457-423a-6d8b-2b7c42008d41/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 691.968937] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Uploaded image 8b9e9b2f-2af9-43fc-934e-ac511e4a03eb to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 691.971153] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 691.971400] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1cdfeb55-ee91-4549-ac3d-e71c4b225c77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.977325] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 691.977325] env[68674]: value = "task-3239722" [ 691.977325] env[68674]: _type = "Task" [ 691.977325] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.985218] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239722, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.055670] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07446} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.055973] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.058657] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b143c6-1ce1-4223-8f4a-2dd74a8e9733 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.080023] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 505b0352-39ab-4841-8766-14626af2b13e/505b0352-39ab-4841-8766-14626af2b13e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.080023] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dadc298-858f-4076-bddd-d5019d2a4ba3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.101238] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 692.101238] env[68674]: value = "task-3239723" [ 692.101238] env[68674]: _type = "Task" [ 692.101238] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.113331] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239723, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.225757] env[68674]: DEBUG nova.scheduler.client.report [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 692.415847] env[68674]: INFO nova.compute.manager [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Took 47.34 seconds to build instance. 
[ 692.433910] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f60333-63c4-b333-b29c-4df448fa4a32, 'name': SearchDatastore_Task, 'duration_secs': 0.00983} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.435293] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fcfe364-d172-421a-bec2-d4ca11e578ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.440990] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 692.440990] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5210dd1d-56ac-05bb-f8e5-99dd9de6e1a8" [ 692.440990] env[68674]: _type = "Task" [ 692.440990] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.449389] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5210dd1d-56ac-05bb-f8e5-99dd9de6e1a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.487798] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239722, 'name': Destroy_Task, 'duration_secs': 0.337231} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.487798] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Destroyed the VM [ 692.487962] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 692.488222] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2a62f087-dc90-479e-adb4-0b0535f58e6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.494548] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 692.494548] env[68674]: value = "task-3239724" [ 692.494548] env[68674]: _type = "Task" [ 692.494548] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.499918] env[68674]: DEBUG nova.compute.manager [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-changed-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 692.500181] env[68674]: DEBUG nova.compute.manager [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing instance network info cache due to event network-changed-4f848177-8140-4862-a7f0-f901b045c157. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 692.501063] env[68674]: DEBUG oslo_concurrency.lockutils [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] Acquiring lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.501063] env[68674]: DEBUG oslo_concurrency.lockutils [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] Acquired lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.501063] env[68674]: DEBUG nova.network.neutron [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing network info cache for port 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.506406] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239724, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.611540] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239723, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.732983] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.012s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.733528] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.736692] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.653s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.736920] env[68674]: DEBUG nova.objects.instance [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lazy-loading 'resources' on Instance uuid ae945f3f-fde8-4b25-a5bd-81014fc99690 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.868944] env[68674]: DEBUG nova.compute.manager [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Received event network-vif-deleted-1602cb92-5f66-425c-a152-a0fdd777da11 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 692.869137] env[68674]: DEBUG nova.compute.manager [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-changed-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 692.869412] env[68674]: DEBUG nova.compute.manager [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing instance network info cache due to event network-changed-4f848177-8140-4862-a7f0-f901b045c157. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 692.869644] env[68674]: DEBUG oslo_concurrency.lockutils [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] Acquiring lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.917996] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03564993-9041-4d14-9e5d-77f7b9c78186 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.553s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.951717] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5210dd1d-56ac-05bb-f8e5-99dd9de6e1a8, 'name': SearchDatastore_Task, 'duration_secs': 0.010029} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.951995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.952277] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 692.952532] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15e5ca06-0507-47d5-8bce-7f8ea22229e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.959595] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 692.959595] env[68674]: value = "task-3239725" [ 692.959595] env[68674]: _type = "Task" [ 692.959595] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.968319] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.008656] env[68674]: DEBUG oslo_vmware.api [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239724, 'name': RemoveSnapshot_Task, 'duration_secs': 0.444055} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.009874] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 693.009874] env[68674]: INFO nova.compute.manager [None req-4cf4e320-1ae3-42fd-b602-7c7a21f2e673 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Took 12.41 seconds to snapshot the instance on the hypervisor. [ 693.116793] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239723, 'name': ReconfigVM_Task, 'duration_secs': 0.736073} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.119153] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 505b0352-39ab-4841-8766-14626af2b13e/505b0352-39ab-4841-8766-14626af2b13e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.119153] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0286960a-4700-4e4e-b12a-5ee4d95fb5b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.128676] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 693.128676] env[68674]: value = "task-3239726" [ 693.128676] env[68674]: _type = "Task" [ 693.128676] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.148337] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239726, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.239920] env[68674]: DEBUG nova.compute.utils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 693.250481] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 693.250613] env[68674]: DEBUG nova.network.neutron [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 693.350275] env[68674]: DEBUG nova.policy [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb4d388bb8df4b1b9dda5086dcea0611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a02f7fd3043c424f92a9e23724ed8296', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 693.399858] env[68674]: DEBUG nova.network.neutron [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updated VIF entry in instance network info cache for port 4f848177-8140-4862-a7f0-f901b045c157. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 693.400294] env[68674]: DEBUG nova.network.neutron [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.423484] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.473027] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239725, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.606240] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "d167585b-11f4-462c-b12e-c6a440c1476a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.606240] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "d167585b-11f4-462c-b12e-c6a440c1476a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.606240] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "d167585b-11f4-462c-b12e-c6a440c1476a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.606240] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "d167585b-11f4-462c-b12e-c6a440c1476a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.606497] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "d167585b-11f4-462c-b12e-c6a440c1476a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.608654] env[68674]: INFO nova.compute.manager [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Terminating instance [ 693.643229] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239726, 'name': Rename_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.750720] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 693.870527] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e88278e-6922-4bfb-94e0-f978512ce129 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.879131] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce469cf4-291c-4886-9cdc-12b1597b6257 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.911750] env[68674]: DEBUG oslo_concurrency.lockutils [req-cbf65831-c757-47fa-91ee-0d3515335f91 req-83eb2352-e2dc-4341-bca9-d4043812fd33 service nova] Releasing lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.912974] env[68674]: DEBUG nova.network.neutron [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Successfully created port: f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 693.914849] env[68674]: DEBUG oslo_concurrency.lockutils [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] Acquired lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.915099] env[68674]: DEBUG nova.network.neutron [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing network info cache for port 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.917615] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1dfc63-fa25-4a0d-9bbf-6b23ced425b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.925076] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788ca71d-04cb-4965-9b9a-df7e31e3c587 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.943547] env[68674]: DEBUG nova.compute.provider_tree [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.951017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 
tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.971304] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239725, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.113786] env[68674]: DEBUG nova.compute.manager [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.113910] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.114853] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de13a0a6-8425-4037-bbb1-15198fe82f0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.123213] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 694.123477] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa900f4b-e1be-4794-a4ff-04bbe4685701 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.138650] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239726, 'name': Rename_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.194370] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 694.194558] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 694.194739] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleting the datastore file [datastore2] d167585b-11f4-462c-b12e-c6a440c1476a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 694.195017] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eab757c6-fde0-499a-a5db-c5f504c2ec91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.202768] env[68674]: DEBUG oslo_vmware.api [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 694.202768] env[68674]: value = "task-3239728" [ 694.202768] env[68674]: _type = "Task" [ 694.202768] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.211771] env[68674]: DEBUG oslo_vmware.api [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.256064] env[68674]: INFO nova.virt.block_device [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Booting with volume ad78e308-8afd-46bf-a8e5-5e31a5c091b0 at /dev/sda [ 694.317206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "f69c5fcf-6d25-48a5-a154-c3632c76175a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.317703] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.319506] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01ceabd0-6bce-4e01-9ba4-9c2858d6f5e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.331653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781fb69c-a550-416d-b0a5-51afec416899 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.375667] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73b9722f-7ca1-4379-8288-024d810e75e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.384905] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3193ece0-011e-4d66-a891-123bae1b594f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.424328] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39d3e86-b64c-47cd-aa1c-387503c74f01 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.432219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdca362-3060-4ceb-ae0d-e1e4e1972600 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.448877] env[68674]: DEBUG nova.scheduler.client.report [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.452047] env[68674]: DEBUG nova.virt.block_device [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updating existing volume attachment record: a8b963bc-967c-42e7-8dd2-02238a0cd37f {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 694.477307] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239725, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.513994} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.477712] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.478020] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.479029] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e47ddc8-64bc-4307-abf6-26c7fac58cb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.485442] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 694.485442] env[68674]: value = "task-3239729" [ 694.485442] env[68674]: _type = "Task" [ 694.485442] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.495764] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.661647] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239726, 'name': Rename_Task, 'duration_secs': 1.250106} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.661982] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 694.662222] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b534c7ca-49ba-4476-9a6e-8ecefd5026f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.673646] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 694.673646] env[68674]: value = "task-3239730" [ 694.673646] env[68674]: _type = "Task" [ 694.673646] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.687665] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239730, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.714499] env[68674]: DEBUG oslo_vmware.api [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3239728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344511} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.714758] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.714946] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 694.715141] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 694.715318] env[68674]: INFO nova.compute.manager [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Took 0.60 seconds to destroy the instance on the hypervisor. [ 694.715557] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.715749] env[68674]: DEBUG nova.compute.manager [-] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 694.715869] env[68674]: DEBUG nova.network.neutron [-] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.730772] env[68674]: DEBUG nova.network.neutron [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updated VIF entry in instance network info cache for port 4f848177-8140-4862-a7f0-f901b045c157. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.731190] env[68674]: DEBUG nova.network.neutron [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.865591] env[68674]: DEBUG nova.compute.manager [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Received event network-changed-83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.865827] env[68674]: DEBUG nova.compute.manager [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Refreshing instance network info cache due to event network-changed-83560e44-ed5c-4f43-8a2c-483d1f512ea2. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.866272] env[68674]: DEBUG oslo_concurrency.lockutils [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] Acquiring lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.866440] env[68674]: DEBUG oslo_concurrency.lockutils [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] Acquired lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.866720] env[68674]: DEBUG nova.network.neutron [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Refreshing network info cache for port 83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.950175] env[68674]: DEBUG nova.compute.manager [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Received event network-changed-83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.950375] env[68674]: DEBUG nova.compute.manager [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Refreshing instance network info cache due to event network-changed-83560e44-ed5c-4f43-8a2c-483d1f512ea2. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.950585] env[68674]: DEBUG oslo_concurrency.lockutils [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] Acquiring lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.956367] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.219s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.959635] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.443s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.959635] env[68674]: DEBUG nova.objects.instance [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 694.983905] env[68674]: INFO nova.scheduler.client.report [None req-1062fbb4-9f13-45e5-8716-434a3511d528 
tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Deleted allocations for instance ae945f3f-fde8-4b25-a5bd-81014fc99690 [ 695.001136] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117867} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.001443] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.002321] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c25276f-c741-4f9b-9052-f144b7263d08 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.033265] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.037765] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f97bd137-beba-45a7-a85a-12216b3bc2a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.064608] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 695.064608] env[68674]: value = "task-3239731" [ 695.064608] env[68674]: _type = "Task" [ 695.064608] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.073058] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239731, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.183146] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239730, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.234530] env[68674]: DEBUG oslo_concurrency.lockutils [req-1f04d19c-5834-4f8b-922b-5a56d6d40eb7 req-79215261-4cb0-48f4-bedd-b10ac3eb7a7f service nova] Releasing lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.340862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "0f618d12-dc7b-4739-8ace-9453a7175d75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.340862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.340862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "0f618d12-dc7b-4739-8ace-9453a7175d75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.340862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.341178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.342160] env[68674]: INFO nova.compute.manager [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Terminating instance [ 695.500511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1062fbb4-9f13-45e5-8716-434a3511d528 tempest-ServersTestManualDisk-1211595029 tempest-ServersTestManualDisk-1211595029-project-member] Lock "ae945f3f-fde8-4b25-a5bd-81014fc99690" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.499s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.555437] env[68674]: DEBUG nova.network.neutron [-] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.576257] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239731, 'name': ReconfigVM_Task, 'duration_secs': 0.39762} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.576737] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.577364] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e955c6d7-0cfa-47c0-abd3-cc3e2f98a5a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.587351] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 695.587351] env[68674]: value = "task-3239732" [ 695.587351] env[68674]: _type = "Task" [ 695.587351] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.601378] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239732, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.684521] env[68674]: DEBUG oslo_vmware.api [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239730, 'name': PowerOnVM_Task, 'duration_secs': 0.975466} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.684794] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 695.684997] env[68674]: INFO nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Took 9.82 seconds to spawn the instance on the hypervisor. 
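[editor's note] The Rename_Task and PowerOnVM_Task entries above follow the same oslo.vmware pattern that recurs throughout this log: a SOAP task is started, then polled ("Waiting for the task ... progress is N% ... completed successfully"). The following is a minimal illustrative sketch of that pattern only, not Nova's actual code path; the host, credentials, and vm_ref value are placeholders.

from oslo_vmware import api as vmware_api

# Placeholder connection details; a real deployment supplies its own vCenter
# endpoint and credentials. task_poll_interval controls the poll cadence that
# produces the "progress is N%" DEBUG lines.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_on_and_wait(session, vm_ref):
    """Start PowerOnVM_Task on a VM managed-object ref and poll it to completion."""
    # invoke_api() issues the SOAP call and returns a task reference.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() blocks, logging each poll, and raises if the task errors.
    return session.wait_for_task(task)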
[ 695.685423] env[68674]: DEBUG nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 695.686182] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a78ff02-e8e1-464a-9fef-a5a1c38a35d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.722019] env[68674]: DEBUG nova.network.neutron [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updated VIF entry in instance network info cache for port 83560e44-ed5c-4f43-8a2c-483d1f512ea2. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.723635] env[68674]: DEBUG nova.network.neutron [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updating instance_info_cache with network_info: [{"id": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "address": "fa:16:3e:d2:3e:07", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83560e44-ed", "ovs_interfaceid": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.846331] env[68674]: DEBUG nova.compute.manager [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 695.846770] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.849628] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca57c93-f38a-4880-a3af-54ae8a7eb3d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.852728] env[68674]: DEBUG nova.network.neutron [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Successfully updated port: f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 695.861142] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.861142] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df709a71-5337-4052-a79c-17df098f9a62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.867707] env[68674]: DEBUG oslo_vmware.api [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 695.867707] env[68674]: value = "task-3239733" [ 695.867707] env[68674]: _type = "Task" [ 695.867707] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.880789] env[68674]: DEBUG oslo_vmware.api [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239733, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.971414] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25d46ecc-ef51-4dd7-9a30-257e4fc3c156 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.972676] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.644s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.972919] env[68674]: DEBUG nova.objects.instance [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lazy-loading 'resources' on Instance uuid 0097c367-bb3a-4b7b-9fcc-b3e3482689e2 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.057813] env[68674]: INFO nova.compute.manager [-] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Took 1.34 seconds to deallocate network for instance. [ 696.096359] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.096940] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.097219] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.097412] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.097631] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.097807] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.097985] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.098277] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.098497] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.098663] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] 
Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.098829] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.099017] env[68674]: DEBUG nova.virt.hardware [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.103701] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694f493e-4d5b-44ed-bdb4-9f6df68cc495 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.107660] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239732, 'name': Rename_Task, 'duration_secs': 0.20815} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.108278] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.109010] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07024225-bcb4-4c26-ba42-eebc1a61faa2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.115914] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0eef3e-1be2-4d7e-ad6b-5378fa37d332 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.124405] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 696.124405] env[68674]: value = "task-3239734" [ 696.124405] env[68674]: _type = "Task" [ 696.124405] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.146372] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239734, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.207023] env[68674]: INFO nova.compute.manager [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Took 46.01 seconds to build instance. 
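[editor's note] The "Acquiring lock ... acquired ... waited 0.000s ... released ... held N.NNNs" DEBUG lines that bracket the build and terminate flows above come from oslo.concurrency's lock bookkeeping. Below is a hedged, generic sketch of that pattern, not Nova's implementation; the lock names and prefix are hypothetical.

from oslo_concurrency import lockutils

@lockutils.synchronized('build-and-run-demo', 'demo-')
def build_instance(uuid):
    # Only one caller per lock name runs at a time; lockutils logs the
    # acquire/release messages with wait and hold durations automatically.
    return uuid

# The same bookkeeping is available as a context manager:
with lockutils.lock('compute_resources-demo'):
    pass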
[ 696.225713] env[68674]: DEBUG oslo_concurrency.lockutils [req-8aa1a28d-80fc-426a-a7c4-dc60137d0774 req-058d6804-cae0-4a6c-979b-0a2e0a99e3e7 service nova] Releasing lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.226195] env[68674]: DEBUG oslo_concurrency.lockutils [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] Acquired lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.226417] env[68674]: DEBUG nova.network.neutron [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Refreshing network info cache for port 83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 696.357255] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.357427] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquired lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.357583] env[68674]: DEBUG nova.network.neutron [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.382800] env[68674]: DEBUG oslo_vmware.api [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239733, 'name': PowerOffVM_Task, 'duration_secs': 0.303309} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.386293] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 696.386293] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 696.386293] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4eba9c4-ba9d-4de2-aa3e-6c34932326ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.488277] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 696.488516] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 696.488752] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Deleting the datastore file [datastore2] 0f618d12-dc7b-4739-8ace-9453a7175d75 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.489488] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12542cd2-fa47-42f6-b6a2-8a1a9175b2c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.495487] env[68674]: DEBUG oslo_vmware.api [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 696.495487] env[68674]: value = "task-3239736" [ 696.495487] env[68674]: _type = "Task" [ 696.495487] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.503716] env[68674]: DEBUG oslo_vmware.api [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239736, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.570559] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.633605] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239734, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.707396] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d8c7575-041e-4a75-a536-06ee8302c4c3 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "505b0352-39ab-4841-8766-14626af2b13e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.105s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.885746] env[68674]: DEBUG oslo_concurrency.lockutils [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Acquiring lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.886413] env[68674]: DEBUG oslo_concurrency.lockutils [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Acquired lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.888173] env[68674]: DEBUG nova.network.neutron [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.903491] env[68674]: DEBUG nova.network.neutron [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.005419] env[68674]: DEBUG oslo_vmware.api [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134416} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.005698] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.005880] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.006166] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.006344] env[68674]: INFO nova.compute.manager [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Took 1.16 seconds to destroy the instance on the hypervisor. [ 697.006587] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.006792] env[68674]: DEBUG nova.compute.manager [-] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 697.006868] env[68674]: DEBUG nova.network.neutron [-] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.079185] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e34286-107a-426b-afc5-67bb3260d085 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.082782] env[68674]: DEBUG nova.network.neutron [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updating instance_info_cache with network_info: [{"id": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "address": "fa:16:3e:64:ee:1d", "network": {"id": "65cdab1a-fb43-4865-af7f-680ee8a72fc3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1526099151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a02f7fd3043c424f92a9e23724ed8296", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fd3dc4-58", "ovs_interfaceid": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.096634] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0cc9b3-23fd-4ff9-b1e8-54651cab1b7d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.141012] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0074718-d533-447a-96eb-bc1aaf6a8e39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.149893] env[68674]: DEBUG oslo_vmware.api [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239734, 'name': PowerOnVM_Task, 'duration_secs': 0.637435} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.152071] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.152286] env[68674]: INFO nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Took 6.42 seconds to spawn the instance on the hypervisor. [ 697.152469] env[68674]: DEBUG nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.153796] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb19a024-fec9-42fb-9ed7-9fcc3b88de23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.157123] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3634ce8-29aa-4592-a6d5-cd67981687ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.179399] env[68674]: DEBUG nova.compute.provider_tree [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.182404] env[68674]: DEBUG nova.compute.manager [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Received event network-vif-deleted-b42ed2ab-15a8-42e4-b62c-bb3188c711e0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.182404] env[68674]: DEBUG nova.compute.manager [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-changed-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.182404] env[68674]: DEBUG nova.compute.manager [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing instance network info cache due to event network-changed-4f848177-8140-4862-a7f0-f901b045c157. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.182551] env[68674]: DEBUG oslo_concurrency.lockutils [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] Acquiring lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.182635] env[68674]: DEBUG oslo_concurrency.lockutils [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] Acquired lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.182750] env[68674]: DEBUG nova.network.neutron [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing network info cache for port 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.210225] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 697.307636] env[68674]: DEBUG nova.network.neutron [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updated VIF entry in instance network info cache for port 83560e44-ed5c-4f43-8a2c-483d1f512ea2. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 697.308012] env[68674]: DEBUG nova.network.neutron [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updating instance_info_cache with network_info: [{"id": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "address": "fa:16:3e:d2:3e:07", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83560e44-ed", "ovs_interfaceid": "83560e44-ed5c-4f43-8a2c-483d1f512ea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.393569] env[68674]: DEBUG nova.compute.manager [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 
1189fa93-608b-4684-a675-f1caf29a9f43] Received event network-vif-plugged-f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.394011] env[68674]: DEBUG oslo_concurrency.lockutils [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] Acquiring lock "1189fa93-608b-4684-a675-f1caf29a9f43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.394011] env[68674]: DEBUG oslo_concurrency.lockutils [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] Lock "1189fa93-608b-4684-a675-f1caf29a9f43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.394562] env[68674]: DEBUG oslo_concurrency.lockutils [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] Lock "1189fa93-608b-4684-a675-f1caf29a9f43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.394562] env[68674]: DEBUG nova.compute.manager [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] No waiting events found dispatching network-vif-plugged-f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.394562] env[68674]: WARNING nova.compute.manager [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Received unexpected event network-vif-plugged-f8fd3dc4-58cc-4298-8fe7-96a500eacace for instance with vm_state building and task_state spawning. [ 697.394686] env[68674]: DEBUG nova.compute.manager [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Received event network-changed-f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.394815] env[68674]: DEBUG nova.compute.manager [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Refreshing instance network info cache due to event network-changed-f8fd3dc4-58cc-4298-8fe7-96a500eacace. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.395179] env[68674]: DEBUG oslo_concurrency.lockutils [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] Acquiring lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.592333] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Releasing lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.592686] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance network_info: |[{"id": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "address": "fa:16:3e:64:ee:1d", "network": {"id": "65cdab1a-fb43-4865-af7f-680ee8a72fc3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1526099151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a02f7fd3043c424f92a9e23724ed8296", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fd3dc4-58", "ovs_interfaceid": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 697.592997] env[68674]: DEBUG oslo_concurrency.lockutils [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] Acquired lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.593507] env[68674]: DEBUG nova.network.neutron [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Refreshing network info cache for port f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.595263] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:ee:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '84aee122-f630-43c5-9cc1-3a38d3819c82', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'f8fd3dc4-58cc-4298-8fe7-96a500eacace', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 697.611131] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Creating folder: Project (a02f7fd3043c424f92a9e23724ed8296). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.614649] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86259d54-0592-4b38-a0a3-307927d1619b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.631063] env[68674]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 697.631287] env[68674]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68674) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 697.631655] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Folder already exists: Project (a02f7fd3043c424f92a9e23724ed8296). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 697.631788] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Creating folder: Instances. Parent ref: group-v647438. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.632055] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b337ce42-33ee-4470-a878-401d1f49beb0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.643686] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Created folder: Instances in parent group-v647438. [ 697.643947] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.644172] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 697.644376] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11d72384-c889-4c5e-8469-8d0404437e61 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.674841] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 697.674841] env[68674]: value = "task-3239739" [ 697.674841] env[68674]: _type = "Task" [ 697.674841] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.687965] env[68674]: DEBUG nova.scheduler.client.report [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.697081] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239739, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.700068] env[68674]: INFO nova.compute.manager [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Took 44.27 seconds to build instance. [ 697.747658] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.769322] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.769631] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.769853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.770057] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.770231] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.776328] env[68674]: INFO nova.compute.manager [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Terminating instance [ 697.810630] env[68674]: DEBUG oslo_concurrency.lockutils [req-5baffe69-c53e-441b-9ff7-9381b5142d80 req-8cc73949-b536-4c46-88a7-e6ce6c545250 service nova] Releasing lock "refresh_cache-0f618d12-dc7b-4739-8ace-9453a7175d75" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.979381] env[68674]: DEBUG nova.network.neutron [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updated VIF entry in instance network info cache for port f8fd3dc4-58cc-4298-8fe7-96a500eacace. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 697.979381] env[68674]: DEBUG nova.network.neutron [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updating instance_info_cache with network_info: [{"id": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "address": "fa:16:3e:64:ee:1d", "network": {"id": "65cdab1a-fb43-4865-af7f-680ee8a72fc3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1526099151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a02f7fd3043c424f92a9e23724ed8296", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fd3dc4-58", "ovs_interfaceid": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.027477] env[68674]: DEBUG nova.network.neutron [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Updating instance_info_cache with network_info: [{"id": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", 
"address": "fa:16:3e:41:4a:0c", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap070f055c-ef", "ovs_interfaceid": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.113948] env[68674]: DEBUG nova.network.neutron [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updated VIF entry in instance network info cache for port 4f848177-8140-4862-a7f0-f901b045c157. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.114474] env[68674]: DEBUG nova.network.neutron [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.186935] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239739, 'name': CreateVM_Task, 'duration_secs': 0.307656} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.190249] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 698.190249] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'attachment_id': 'a8b963bc-967c-42e7-8dd2-02238a0cd37f', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647448', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'name': 'volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1189fa93-608b-4684-a675-f1caf29a9f43', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'serial': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0'}, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=68674) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 698.190249] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Root volume attach. 
Driver type: vmdk {{(pid=68674) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 698.190482] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb7eb4c-771f-4206-8d35-e3703d77bd74 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.200350] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81fa67c5-cd5b-4f76-a48a-18a5fa3baa1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.203412] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.231s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.206233] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e9a0e2e-6931-4ca0-ab82-71a363c895ce tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.775s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.206233] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.839s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.207438] env[68674]: INFO nova.compute.claims [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.215056] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31814ba0-6a2e-4594-9610-13a2da67e0d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.221890] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-62cfb730-e648-4e01-b6a5-7a688513a8dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.224841] env[68674]: DEBUG nova.network.neutron [-] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.231338] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 698.231338] env[68674]: value = "task-3239740" [ 698.231338] env[68674]: _type = "Task" [ 698.231338] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.241031] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.243685] env[68674]: INFO nova.scheduler.client.report [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Deleted allocations for instance 0097c367-bb3a-4b7b-9fcc-b3e3482689e2 [ 698.283776] env[68674]: DEBUG nova.compute.manager [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 698.285428] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.289022] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44092dcd-7fbc-46bc-b47a-0348941bd109 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.299217] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 698.299480] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76dbbee0-dd00-4b92-bd43-ac3e701d3691 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.307265] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 698.307265] env[68674]: value = "task-3239741" [ 698.307265] env[68674]: _type = "Task" [ 698.307265] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.318494] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239741, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.483791] env[68674]: DEBUG oslo_concurrency.lockutils [req-71fa0b88-59bb-4c40-9f2a-2da13280210e req-33c27a39-1251-4231-9810-913cb20e7af4 service nova] Releasing lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.533015] env[68674]: DEBUG oslo_concurrency.lockutils [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Releasing lock "refresh_cache-505b0352-39ab-4841-8766-14626af2b13e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.533702] env[68674]: DEBUG nova.compute.manager [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Inject network info {{(pid=68674) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 698.533702] env[68674]: DEBUG nova.compute.manager [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] [instance: 505b0352-39ab-4841-8766-14626af2b13e] network_info to inject: |[{"id": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "address": "fa:16:3e:41:4a:0c", "network": {"id": "d0e868c8-42eb-4685-8a15-4b3a8cc40530", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-379831430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fa7abd14180453bb12e9dd5fc24523f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap070f055c-ef", "ovs_interfaceid": "070f055c-efb5-4c7e-ba62-e44b000f2eeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 698.541534] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Reconfiguring VM instance to set the machine id {{(pid=68674) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 698.541534] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e644aee2-8dc0-405b-af26-38d3c3ea6b0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.558647] env[68674]: DEBUG oslo_vmware.api [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Waiting for the task: (returnval){ [ 
698.558647] env[68674]: value = "task-3239742" [ 698.558647] env[68674]: _type = "Task" [ 698.558647] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.574022] env[68674]: DEBUG oslo_vmware.api [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Task: {'id': task-3239742, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.619014] env[68674]: DEBUG oslo_concurrency.lockutils [req-23678573-bd77-4581-9230-f4352412f8bf req-69da5323-6161-4bce-b98b-1c4dc03d940a service nova] Releasing lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.715127] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.731024] env[68674]: INFO nova.compute.manager [-] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Took 1.72 seconds to deallocate network for instance. [ 698.756021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-de4dd0d6-e6a7-4cdb-ad29-57349afb13f1 tempest-ServersNegativeTestMultiTenantJSON-2096645388 tempest-ServersNegativeTestMultiTenantJSON-2096645388-project-member] Lock "0097c367-bb3a-4b7b-9fcc-b3e3482689e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.896s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.758631] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 42%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.823146] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239741, 'name': PowerOffVM_Task, 'duration_secs': 0.289492} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.823908] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 698.823908] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 698.823908] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6dd8431-498a-4ddd-b7aa-8c17064e29c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.905380] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 698.905616] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 698.905811] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Deleting the datastore file [datastore2] 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 698.906096] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e640ec2-a7a7-4a68-9dc9-1d9dfa1ff691 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.912858] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for the task: (returnval){ [ 698.912858] env[68674]: value = "task-3239744" [ 698.912858] env[68674]: _type = "Task" [ 698.912858] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.922845] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.069695] env[68674]: DEBUG oslo_vmware.api [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Task: {'id': task-3239742, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.236265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.248585] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 54%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.256707] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.426631] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.574573] env[68674]: DEBUG oslo_vmware.api [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Task: {'id': task-3239742, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.676929] env[68674]: DEBUG nova.compute.manager [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-changed-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.677814] env[68674]: DEBUG nova.compute.manager [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing instance network info cache due to event network-changed-4f848177-8140-4862-a7f0-f901b045c157. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 699.677814] env[68674]: DEBUG oslo_concurrency.lockutils [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] Acquiring lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.677814] env[68674]: DEBUG oslo_concurrency.lockutils [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] Acquired lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.677814] env[68674]: DEBUG nova.network.neutron [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Refreshing network info cache for port 4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.755051] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 69%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.926622] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.985785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fa6ba9-798a-479e-a4fa-52a5ac155075 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.995187] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52900e6e-ac63-4f14-a135-3281bd44cffa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.039026] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03672bdc-e2e5-45b4-9913-ddcb393dd681 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.048727] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9862b2fe-d840-42bb-a58e-5e572034323d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.076339] env[68674]: DEBUG nova.compute.provider_tree [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.095061] env[68674]: DEBUG oslo_vmware.api [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] Task: {'id': task-3239742, 
'name': ReconfigVM_Task, 'duration_secs': 1.181402} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.095061] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-48e3ab96-4c1e-4f55-8aab-f5d818ea7655 tempest-ServersAdminTestJSON-1212255269 tempest-ServersAdminTestJSON-1212255269-project-admin] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Reconfigured VM instance to set the machine id {{(pid=68674) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 700.250439] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 82%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.425054] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.566188] env[68674]: DEBUG nova.network.neutron [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updated VIF entry in instance network info cache for port 4f848177-8140-4862-a7f0-f901b045c157. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 700.566188] env[68674]: DEBUG nova.network.neutron [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [{"id": "4f848177-8140-4862-a7f0-f901b045c157", "address": "fa:16:3e:2f:19:f3", "network": {"id": "f82002f4-7eb1-4240-818b-90533b23aec4", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2040854814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc7acf9ab6ee4ce49cc6d971fa212411", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbd7899c-c96e-47fc-9141-5803b646917a", "external-id": "nsx-vlan-transportzone-333", "segmentation_id": 333, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f848177-81", "ovs_interfaceid": "4f848177-8140-4862-a7f0-f901b045c157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.586633] env[68674]: DEBUG nova.scheduler.client.report [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.751759] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 97%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.924441] env[68674]: DEBUG oslo_vmware.api [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Task: {'id': task-3239744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.767425} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.924658] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 700.924920] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 700.925239] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.925459] env[68674]: INFO nova.compute.manager [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Took 2.64 seconds to destroy the instance on the hypervisor. [ 700.925767] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.926041] env[68674]: DEBUG nova.compute.manager [-] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 700.926175] env[68674]: DEBUG nova.network.neutron [-] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.985725] env[68674]: INFO nova.compute.manager [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Rebuilding instance [ 701.026257] env[68674]: DEBUG nova.compute.manager [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 701.026942] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b75080-6022-41e0-80a5-0998df736024 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.069337] env[68674]: DEBUG oslo_concurrency.lockutils [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] Releasing lock "refresh_cache-357b515d-ef37-4688-969e-f894be30edb7" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.069624] env[68674]: DEBUG nova.compute.manager [req-d31d8f68-b2bc-4810-b939-c97f57468323 req-29b71fcd-d856-4bcf-86a0-f62354c41321 service nova] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Received event network-vif-deleted-83560e44-ed5c-4f43-8a2c-483d1f512ea2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.092428] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.886s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.092945] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 701.095530] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.160s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.095747] env[68674]: DEBUG nova.objects.instance [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lazy-loading 'resources' on Instance uuid f45200cd-6cb0-498a-8858-1e70177031d9 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.249706] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 97%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.450933] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.451339] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.603017] env[68674]: DEBUG nova.compute.utils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 701.606090] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 701.606090] env[68674]: DEBUG nova.network.neutron [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 701.668230] env[68674]: DEBUG nova.policy [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50e46e8c9fbc4778b5f89359ae81bfa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6b179855b874365964446f95f9f5a53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 701.725180] env[68674]: DEBUG nova.network.neutron [-] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.755646] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task} progress is 98%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.831868] env[68674]: DEBUG nova.compute.manager [req-1c02c990-4e69-4dc0-9500-e4e3f70db4b1 req-214b68c4-4c77-403e-899b-3a54dcbf5987 service nova] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Received event network-vif-deleted-e4fe497b-b574-433f-98da-10989ad09255 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 702.017363] env[68674]: DEBUG nova.network.neutron [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Successfully created port: 61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.045239] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.046422] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d4d470f-7f3b-4631-be9a-167d7de98116 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.054559] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 702.054559] env[68674]: value = "task-3239745" [ 702.054559] env[68674]: _type = "Task" [ 702.054559] env[68674]: } to 
complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.067258] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239745, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.111922] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 702.230401] env[68674]: INFO nova.compute.manager [-] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Took 1.30 seconds to deallocate network for instance. [ 702.255692] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239740, 'name': RelocateVM_Task, 'duration_secs': 3.601107} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.259879] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 702.259879] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647448', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'name': 'volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1189fa93-608b-4684-a675-f1caf29a9f43', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'serial': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 702.259879] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722eeee6-25d0-4979-91a6-ef0448982947 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.279179] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182ce6d2-7327-4f01-b79b-e55d5b4a6448 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.302666] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Reconfiguring VM instance instance-00000027 to attach 
disk [datastore1] volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0/volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.308835] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31c09a3d-64a0-407f-939a-853b57f62c73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.321609] env[68674]: INFO nova.compute.manager [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Rebuilding instance [ 702.328018] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 702.328018] env[68674]: value = "task-3239746" [ 702.328018] env[68674]: _type = "Task" [ 702.328018] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.339907] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239746, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.343914] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848267a1-cf5b-4806-bf8d-f687730409a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.354539] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eebcd86-9bd9-4fa0-a906-78215e745397 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.393625] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50465c1e-82cb-49d5-ae70-e5fccfc0c9f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.401171] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a713b6-f223-4e35-b3fb-cad4ceee8559 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.414597] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.428338] env[68674]: DEBUG nova.compute.manager [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 
tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 702.429177] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c3ba1-c48e-49dc-b22d-da2d0e767ddb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.566213] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239745, 'name': PowerOffVM_Task, 'duration_secs': 0.274584} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.566213] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.566601] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.567166] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5321958-5a33-42b6-ae56-a6e9730c3da5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.574235] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 702.574471] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0300a4e-b873-452e-ab29-6144ed8243b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.600775] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 702.600775] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 702.600775] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Deleting the datastore file [datastore1] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589 {{(pid=68674) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.600775] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52d5ff3a-9f05-4d2e-8e37-4377b6ab79df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.607367] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 702.607367] env[68674]: value = "task-3239748" [ 702.607367] env[68674]: _type = "Task" [ 702.607367] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.615650] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239748, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.738815] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.837984] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239746, 'name': ReconfigVM_Task, 'duration_secs': 0.433669} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.837984] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Reconfigured VM instance instance-00000027 to attach disk [datastore1] volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0/volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 702.842695] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94d4e222-7fc9-492f-8ec1-f63904a94db7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.859675] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 702.859675] env[68674]: value = "task-3239749" [ 702.859675] env[68674]: _type = "Task" [ 702.859675] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.868796] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239749, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.951121] env[68674]: ERROR nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [req-00a238a5-8e8a-48e8-9e5e-93bc6fa94b56] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-00a238a5-8e8a-48e8-9e5e-93bc6fa94b56"}]} [ 702.972895] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 702.991109] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 702.991109] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 703.002251] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "50bb7509-b7e9-4dc3-9746-acd46010cc26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.002509] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 
tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.002713] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "50bb7509-b7e9-4dc3-9746-acd46010cc26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.002917] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.003096] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.008718] env[68674]: INFO nova.compute.manager [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Terminating instance [ 703.012561] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 703.043764] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 703.120671] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091103} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.124360] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.125349] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.125591] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.134829] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 703.169666] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 703.170174] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.172402] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 703.176193] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
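[editor's note] The records above repeatedly show the same pattern: a vCenter task is created (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ReconfigVM_Task), the driver logs "Waiting for the task ... to complete", polls "progress is N%", and finally logs "completed successfully" with a duration. The sketch below is a minimal, self-contained illustration of that polling loop; it is not oslo.vmware's wait_for_task implementation, and the TaskClient protocol is a hypothetical stand-in for whatever returns vCenter task state and progress.

```python
# Minimal sketch of the task-polling pattern seen in the surrounding records
# ("Waiting for the task ... progress is N% ... completed successfully").
# NOT oslo.vmware's code; TaskClient is a hypothetical stand-in.
import time
from typing import Protocol


class TaskClient(Protocol):
    def get_state(self, task_id: str) -> str: ...      # e.g. "running", "success", "error"
    def get_progress(self, task_id: str) -> int: ...   # 0-100
    def get_error(self, task_id: str) -> str: ...


def wait_for_task(client: TaskClient, task_id: str,
                  poll_interval: float = 0.5, timeout: float = 300.0) -> None:
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = client.get_state(task_id)
        if state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed: {client.get_error(task_id)}")
        # Mirrors the periodic "progress is N%" DEBUG lines in the log.
        print(f"Task {task_id} progress is {client.get_progress(task_id)}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```

In the log this loop runs inside the request context that started the task (the same req-UUID appears on every poll line), which is why a slow RelocateVM_Task or DeleteDatastoreFile_Task shows up as a long run of interleaved progress records.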
[ 703.176385] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 703.176541] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 703.176772] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 703.176944] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 703.177133] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 703.177311] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 703.177492] env[68674]: DEBUG nova.virt.hardware [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 703.178439] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8159cf6b-b2af-4d43-8050-9057fedb12aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.192748] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80295c60-e9bc-4c9c-a13f-4b1c826d2fdd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.372266] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239749, 'name': ReconfigVM_Task, 'duration_secs': 0.207043} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.372584] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647448', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'name': 'volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1189fa93-608b-4684-a675-f1caf29a9f43', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'serial': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 703.376168] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccaa3504-4590-41c8-a091-5e901ae2d541 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.381919] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 703.381919] env[68674]: value = "task-3239750" [ 703.381919] env[68674]: _type = "Task" [ 703.381919] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.394199] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239750, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.446967] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.449364] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d30156ba-0efe-4ffa-b92e-7a6693891e79 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.456901] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 703.456901] env[68674]: value = "task-3239751" [ 703.456901] env[68674]: _type = "Task" [ 703.456901] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.470475] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239751, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.515465] env[68674]: DEBUG nova.compute.manager [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 703.515465] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.515465] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d014f0db-d519-466f-8c2d-7e1156713fbd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.527720] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.528960] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2025b82-aade-4f77-84b6-aea0a2534e02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.536771] env[68674]: DEBUG oslo_vmware.api [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 703.536771] env[68674]: value = "task-3239752" [ 703.536771] env[68674]: _type = "Task" [ 703.536771] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.553197] env[68674]: DEBUG oslo_vmware.api [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239752, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.722352] env[68674]: DEBUG nova.network.neutron [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Successfully updated port: 61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.777800] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.777994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.778920] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc06475-229c-4c82-bbb4-0606b0943ea4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.789154] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadd34c8-21e6-4cee-a4c6-1b7d415d7e86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.824738] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3182fa5-a5ef-429e-8df7-3e04e6415e57 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.834287] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379b872b-c4c1-420c-97a1-c229954a0dc9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.851998] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 703.891655] env[68674]: DEBUG nova.compute.manager [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Received event 
network-vif-plugged-61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.891655] env[68674]: DEBUG oslo_concurrency.lockutils [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] Acquiring lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.891909] env[68674]: DEBUG oslo_concurrency.lockutils [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.892188] env[68674]: DEBUG oslo_concurrency.lockutils [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.892467] env[68674]: DEBUG nova.compute.manager [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] No waiting events found dispatching network-vif-plugged-61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 703.892684] env[68674]: WARNING nova.compute.manager [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Received unexpected event network-vif-plugged-61791433-aab7-4244-91a2-6caef49a0717 for instance with vm_state building and task_state spawning. [ 703.892848] env[68674]: DEBUG nova.compute.manager [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Received event network-changed-61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.893011] env[68674]: DEBUG nova.compute.manager [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Refreshing instance network info cache due to event network-changed-61791433-aab7-4244-91a2-6caef49a0717. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 703.893233] env[68674]: DEBUG oslo_concurrency.lockutils [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] Acquiring lock "refresh_cache-d88ccf9b-7432-4be0-82f7-b2a9155f7d86" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.893370] env[68674]: DEBUG oslo_concurrency.lockutils [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] Acquired lock "refresh_cache-d88ccf9b-7432-4be0-82f7-b2a9155f7d86" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.893525] env[68674]: DEBUG nova.network.neutron [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Refreshing network info cache for port 61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.900978] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239750, 'name': Rename_Task, 'duration_secs': 0.205658} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.900978] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 703.900978] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6ce4871-57b5-4b72-8a70-dacada49a8e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.907593] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 703.907593] env[68674]: value = "task-3239753" [ 703.907593] env[68674]: _type = "Task" [ 703.907593] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.915843] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239753, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.965221] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239751, 'name': PowerOffVM_Task, 'duration_secs': 0.222175} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.965498] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.965757] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.966614] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6a5093-e52d-4862-be85-1493103b6f7d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.973849] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 703.974118] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-763c9b06-1241-4a52-9161-d77325ce7bfc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.046535] env[68674]: DEBUG oslo_vmware.api [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239752, 'name': PowerOffVM_Task, 'duration_secs': 0.242988} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.048896] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.049124] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 704.049341] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.049523] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.049588] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleting the datastore file [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.049829] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af1ac381-7daf-4dad-b2f7-b6dbd7d293bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.051518] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-323f5e80-749f-4489-920b-bdf2a1c506ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.059475] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 704.059475] env[68674]: value = "task-3239756" [ 704.059475] env[68674]: _type = "Task" [ 704.059475] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.067073] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239756, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.131453] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.131640] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.131685] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Deleting the datastore file [datastore1] 50bb7509-b7e9-4dc3-9746-acd46010cc26 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.131984] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-439fa0fc-8b52-4656-a6fe-a7e8f5a323ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.138940] env[68674]: DEBUG oslo_vmware.api [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 704.138940] env[68674]: value = "task-3239757" [ 704.138940] env[68674]: _type = "Task" [ 704.138940] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.150784] env[68674]: DEBUG oslo_vmware.api [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239757, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.178309] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.178615] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.178834] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.179082] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.179255] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.179417] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.179641] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.179815] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.179997] env[68674]: 
DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.180212] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.180421] env[68674]: DEBUG nova.virt.hardware [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.181433] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7887a73-550f-43d6-befd-4c43f31761bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.190893] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904ea62a-0b1e-49ba-b68b-9f6cb0aafaca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.208455] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.214245] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.214551] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 704.214774] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6919655-09e2-42ae-920e-1e37dcc68026 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.232051] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-d88ccf9b-7432-4be0-82f7-b2a9155f7d86" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.237863] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 704.237863] env[68674]: value = "task-3239758" [ 704.237863] env[68674]: _type = "Task" [ 704.237863] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.251269] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239758, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.383763] env[68674]: ERROR nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] [req-b150b1c2-6aaf-4227-ad10-da2690748334] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b150b1c2-6aaf-4227-ad10-da2690748334"}]} [ 704.411018] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 704.425778] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239753, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.430205] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 704.430431] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.449906] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 704.472201] env[68674]: DEBUG nova.network.neutron [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.480756] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 704.572333] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240297} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.575580] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.575699] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.575873] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.625264] env[68674]: DEBUG nova.network.neutron [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.651927] env[68674]: DEBUG oslo_vmware.api [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3239757, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207347} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.651927] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.651927] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.651927] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.652287] env[68674]: INFO nova.compute.manager [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Took 1.14 seconds to destroy the instance on the hypervisor. 
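The 409 from placement above ("placement.concurrent_update") and the later jump of provider ade3f042-7427-494b-9654-0b65e074850c from generation 69 to 70 reflect generation-based optimistic concurrency: an inventory update carrying a stale resource_provider_generation is rejected, the client refreshes its view, and the retry succeeds. The following minimal, self-contained Python sketch illustrates that refresh-and-retry loop with a toy in-memory placement; FakePlacement, report_inventory and the stale cached generation value are illustrative assumptions, not Nova's or Placement's actual code.

# Minimal sketch (not the real Placement client) of the generation-based
# optimistic concurrency seen in the surrounding log entries: a PUT with a
# stale generation is rejected with a conflict, the caller refreshes, and
# the retry succeeds.  All names here are hypothetical stand-ins.

class ConflictError(Exception):
    """Stand-in for the 409 placement.concurrent_update response."""


class FakePlacement:
    def __init__(self):
        self.generation = 69          # matches the generation in the log
        self.inventory = {}

    def get_inventories(self, rp_uuid):
        # Refresh path: return the current generation with the inventory.
        return {"resource_provider_generation": self.generation,
                "inventories": dict(self.inventory)}

    def put_inventories(self, rp_uuid, generation, inventories):
        if generation != self.generation:
            raise ConflictError("placement.concurrent_update")
        self.inventory = dict(inventories)
        self.generation += 1          # 69 -> 70, as in the log
        return self.generation


def report_inventory(placement, rp_uuid, inventories, cached_generation,
                     max_retries=3):
    """Update inventory, refreshing the cached generation on a conflict."""
    generation = cached_generation
    for _ in range(max_retries):
        try:
            return placement.put_inventories(rp_uuid, generation, inventories)
        except ConflictError:
            # Another writer bumped the provider generation; refresh and retry.
            generation = placement.get_inventories(
                rp_uuid)["resource_provider_generation"]
    raise RuntimeError("gave up after %d conflicts" % max_retries)


if __name__ == "__main__":
    rp = "ade3f042-7427-494b-9654-0b65e074850c"
    placement = FakePlacement()
    inv = {"VCPU": {"total": 48, "allocation_ratio": 4.0, "max_unit": 16}}
    # Simulate the stale cache that produced the 409: the caller still
    # believes the generation is 68 (an assumed value for illustration).
    new_gen = report_inventory(placement, rp, inv, cached_generation=68)
    print("inventory accepted, provider generation is now", new_gen)  # 70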
[ 704.652445] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.652665] env[68674]: DEBUG nova.compute.manager [-] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 704.652784] env[68674]: DEBUG nova.network.neutron [-] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.747867] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239758, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.925025] env[68674]: DEBUG oslo_vmware.api [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239753, 'name': PowerOnVM_Task, 'duration_secs': 0.626969} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.925025] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 704.925025] env[68674]: INFO nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Took 8.83 seconds to spawn the instance on the hypervisor. 
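The repeated "Task: {...} progress is N%" entries come from a poll-until-done loop around long-running vSphere tasks (PowerOnVM_Task, DeleteDatastoreFile_Task, CreateVM_Task). Below is a generic sketch of that polling pattern; FakeTask, the poll interval and the progress arithmetic are made up for illustration and this is not oslo.vmware's wait_for_task implementation.

# Generic poll-until-done sketch mirroring the progress lines in the log.
# The FakeTask class is a stand-in; a real client would read the task's
# state and progress from the vSphere API instead.

import time


class FakeTask:
    """Stands in for a vSphere task such as PowerOnVM_Task."""

    def __init__(self, name, steps=3):
        self.name = name
        self._progress = 0
        self._steps = steps

    def poll(self):
        # Each poll advances the fake task toward completion.
        self._progress = min(100, self._progress + 100 // self._steps + 1)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5, timeout=60.0):
    """Poll a task until it finishes, logging progress along the way."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = task.poll()
        print("Task %s progress is %d%%" % (task.name, progress))
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task %s failed" % task.name)
        time.sleep(interval)
    raise TimeoutError("task %s did not finish in %ss" % (task.name, timeout))


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"), interval=0.01)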
[ 704.925025] env[68674]: DEBUG nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 704.926362] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451e0b98-18e5-47ec-add7-fa60a0a66835 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.116519] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d6c8e8-060c-4d8a-af1f-6ba79f0e6a50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.126392] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6a3e2b-c9ff-4998-b967-5264dc94db5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.131507] env[68674]: DEBUG oslo_concurrency.lockutils [req-500e2542-9be5-4a0e-b456-b5f1cc6b5977 req-f13cd3b9-6c5e-4f39-b725-fff3dee316b1 service nova] Releasing lock "refresh_cache-d88ccf9b-7432-4be0-82f7-b2a9155f7d86" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.131786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-d88ccf9b-7432-4be0-82f7-b2a9155f7d86" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.131965] env[68674]: DEBUG nova.network.neutron [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.159936] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dca3c9-8763-48da-956a-f5cc67c9fd63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.168029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243c1fcb-6336-4fd3-aa88-9e2306719211 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.182268] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.248442] env[68674]: 
DEBUG oslo_vmware.api [-] Task: {'id': task-3239758, 'name': CreateVM_Task, 'duration_secs': 0.51477} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.248590] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 705.248995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.249265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.249638] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 705.249895] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5162eb4f-1dd8-4f90-bd42-26da854777c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.254359] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 705.254359] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52873e0c-5484-66f0-ed42-edec8c68dc80" [ 705.254359] env[68674]: _type = "Task" [ 705.254359] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.262185] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52873e0c-5484-66f0-ed42-edec8c68dc80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.448985] env[68674]: INFO nova.compute.manager [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Took 46.62 seconds to build instance. 
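The lock/SearchDatastore_Task pair on [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea above, followed further down by CopyVirtualDisk_Task into the instance folder, is a check-then-copy image cache: the cached VMDK is fetched at most once per datastore, and every spawn then copies it into the instance directory while holding a lock on the cache path. The sketch below mirrors that flow under stated assumptions; the lock helper and FakeDatastore methods are stand-ins, not nova.virt.vmwareapi or oslo.vmware APIs.

# Hypothetical check-then-copy image-cache sketch.  Serialize on the cached
# image path, fetch the image only if it is missing, then copy it to the
# instance's own directory.  Everything here is illustrative.

import threading
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()


@contextmanager
def lockutils_lock(name):
    # Simplified equivalent of the "Acquiring/Releasing lock" pairs above.
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    with lock:
        yield


class FakeDatastore:
    def __init__(self):
        self.files = set()

    def exists(self, path):                   # SearchDatastore_Task stand-in
        return path in self.files

    def fetch_from_glance(self, path):        # first boot pays the download
        self.files.add(path)

    def copy_virtual_disk(self, src, dst):    # CopyVirtualDisk_Task stand-in
        if src not in self.files:
            raise FileNotFoundError(src)
        self.files.add(dst)


def fetch_image_if_missing(ds, image_id, instance_uuid):
    cache_vmdk = "devstack-image-cache_base/%s/%s.vmdk" % (image_id, image_id)
    with lockutils_lock("[datastore2] %s" % cache_vmdk):
        if not ds.exists(cache_vmdk):
            ds.fetch_from_glance(cache_vmdk)
    dst = "%s/%s.vmdk" % (instance_uuid, instance_uuid)
    ds.copy_virtual_disk(cache_vmdk, dst)
    return dst


if __name__ == "__main__":
    ds = FakeDatastore()
    print(fetch_image_if_missing(ds, "b84d9354-ef6b-46ca-9dae-6549fa89bbea",
                                 "367461db-8bc4-4cf0-b7f6-f79ee2bf8589"))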
[ 705.619956] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.620160] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.620317] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.620495] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.620638] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.620782] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.621129] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 705.621365] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 705.621590] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 
tempest-ServersAdminTestJSON-1774221260-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.621797] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.622023] env[68674]: DEBUG nova.virt.hardware [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 705.622919] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac5641f-6d95-477e-8479-ba4a140fbffc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.631411] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a89a53-47aa-437d-9f43-2bbd4d52408b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.654209] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:f2:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a459b31b-865e-45d7-a62b-b7c95eb50c15', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.662713] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.662713] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.662713] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d2e9639-ca8a-4430-af9d-bec599cd5c69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.677366] env[68674]: DEBUG nova.network.neutron [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.686475] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.686475] env[68674]: value = "task-3239759" [ 705.686475] env[68674]: _type = "Task" [ 705.686475] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.695343] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239759, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.724250] env[68674]: DEBUG nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 69 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 705.725151] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 69 to 70 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 705.725151] env[68674]: DEBUG nova.compute.provider_tree [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.733017] env[68674]: DEBUG nova.network.neutron [-] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.765378] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52873e0c-5484-66f0-ed42-edec8c68dc80, 'name': SearchDatastore_Task, 'duration_secs': 0.024488} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.765713] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.765931] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.766183] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.766327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.766504] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.766768] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49dd2744-428a-44e2-ac62-0f9feff99d68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.775645] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.775923] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.777183] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d7978c3-1b2e-4163-a33e-c71c266add56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.782782] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 705.782782] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52017f74-f915-c024-fbf4-1acbea9852b1" [ 705.782782] env[68674]: _type = "Task" [ 705.782782] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.801775] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52017f74-f915-c024-fbf4-1acbea9852b1, 'name': SearchDatastore_Task, 'duration_secs': 0.009506} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.801775] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a03b74ca-b2a8-42a2-b2af-2606b77636d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.808208] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 705.808208] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b433bd-45ad-262e-5f45-270fda9614fb" [ 705.808208] env[68674]: _type = "Task" [ 705.808208] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.828310] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b433bd-45ad-262e-5f45-270fda9614fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014453} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.828310] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.828310] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 705.828310] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-312b7daf-f035-4da1-afa0-98e75f714018 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.837019] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 705.837019] env[68674]: value = "task-3239760" [ 705.837019] env[68674]: _type = "Task" [ 705.837019] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.846033] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239760, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.880368] env[68674]: DEBUG nova.network.neutron [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Updating instance_info_cache with network_info: [{"id": "61791433-aab7-4244-91a2-6caef49a0717", "address": "fa:16:3e:c2:2b:e7", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61791433-aa", "ovs_interfaceid": "61791433-aab7-4244-91a2-6caef49a0717", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.953730] env[68674]: DEBUG oslo_concurrency.lockutils [None req-788a8faa-3e56-4e50-8e1f-e1e6cd6c6072 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "1189fa93-608b-4684-a675-f1caf29a9f43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.820s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.038255] env[68674]: DEBUG nova.compute.manager [req-c9706f11-f676-4ebd-be89-0b44d5fcb04e req-55250bb2-2af6-4fd6-8f68-b97c7551ccdc service nova] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Received event network-vif-deleted-21aba0b8-ff69-4bec-829d-29a8f6941877 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.174099] env[68674]: DEBUG nova.compute.manager [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Received event network-changed-f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.174099] env[68674]: DEBUG nova.compute.manager [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Refreshing instance network info cache due to event network-changed-f8fd3dc4-58cc-4298-8fe7-96a500eacace. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 706.174099] env[68674]: DEBUG oslo_concurrency.lockutils [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] Acquiring lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.174099] env[68674]: DEBUG oslo_concurrency.lockutils [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] Acquired lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.174099] env[68674]: DEBUG nova.network.neutron [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Refreshing network info cache for port f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.200025] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239759, 'name': CreateVM_Task, 'duration_secs': 0.398202} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.200025] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.200025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.200025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.200025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.200025] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f31ef8fe-e7a1-40d4-b198-de55bb3b6767 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.209463] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 706.209463] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ca11ac-3596-5b3b-ed22-6b12e461780b" [ 706.209463] env[68674]: _type = "Task" [ 706.209463] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.220629] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ca11ac-3596-5b3b-ed22-6b12e461780b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.232340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.137s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.235162] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.353s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.236825] env[68674]: INFO nova.compute.claims [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.240400] env[68674]: INFO nova.compute.manager [-] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Took 1.59 seconds to deallocate network for instance. [ 706.255532] env[68674]: INFO nova.scheduler.client.report [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Deleted allocations for instance f45200cd-6cb0-498a-8858-1e70177031d9 [ 706.352352] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239760, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.384059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-d88ccf9b-7432-4be0-82f7-b2a9155f7d86" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.384059] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance network_info: |[{"id": "61791433-aab7-4244-91a2-6caef49a0717", "address": "fa:16:3e:c2:2b:e7", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61791433-aa", "ovs_interfaceid": "61791433-aab7-4244-91a2-6caef49a0717", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 706.384387] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:2b:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61791433-aab7-4244-91a2-6caef49a0717', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.392493] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.392493] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.392493] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a63af397-008a-4b26-9ea4-e480e0dac21e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.414426] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.414426] env[68674]: value = "task-3239761" [ 706.414426] env[68674]: _type = "Task" [ 706.414426] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.425650] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239761, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.456397] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 706.720211] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ca11ac-3596-5b3b-ed22-6b12e461780b, 'name': SearchDatastore_Task, 'duration_secs': 0.062704} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.720552] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.720790] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.721134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.721212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.721397] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 706.721664] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5c3239a-5805-4b4f-876d-426624f82f3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.732811] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.733018] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.736215] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f424ab6-67d1-4b2c-be30-995a8355accd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.745550] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 706.745550] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5216a621-b315-2235-6d1a-4fde97ea4f32" [ 706.745550] env[68674]: _type = "Task" [ 706.745550] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.749963] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.756652] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5216a621-b315-2235-6d1a-4fde97ea4f32, 'name': SearchDatastore_Task, 'duration_secs': 0.010083} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.757460] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a8bb9c8-9a99-495b-96f2-2f0b06cac3fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.765247] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 706.765247] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252a421-f634-560e-80e5-5c2c53d8e3dd" [ 706.765247] env[68674]: _type = "Task" [ 706.765247] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.765757] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ecb79f11-003c-4ec3-9832-a25d763251e3 tempest-ServerShowV257Test-435696042 tempest-ServerShowV257Test-435696042-project-member] Lock "f45200cd-6cb0-498a-8858-1e70177031d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.068s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.776216] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252a421-f634-560e-80e5-5c2c53d8e3dd, 'name': SearchDatastore_Task, 'duration_secs': 0.008562} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.776500] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.776767] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.777034] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7933b69-9f02-4b5b-8449-89ae070d7933 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.783357] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 706.783357] env[68674]: value = "task-3239762" [ 706.783357] env[68674]: _type = "Task" [ 706.783357] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.791958] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.845846] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239760, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535881} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.846201] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 706.846497] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 706.846760] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa4bc609-88c9-476d-81e2-86a7d19cc628 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.852682] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 706.852682] env[68674]: value = "task-3239763" [ 706.852682] env[68674]: _type = "Task" [ 706.852682] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.860452] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.927037] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239761, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.989519] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.086920] env[68674]: DEBUG nova.network.neutron [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updated VIF entry in instance network info cache for port f8fd3dc4-58cc-4298-8fe7-96a500eacace. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.088095] env[68674]: DEBUG nova.network.neutron [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updating instance_info_cache with network_info: [{"id": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "address": "fa:16:3e:64:ee:1d", "network": {"id": "65cdab1a-fb43-4865-af7f-680ee8a72fc3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1526099151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a02f7fd3043c424f92a9e23724ed8296", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8fd3dc4-58", "ovs_interfaceid": "f8fd3dc4-58cc-4298-8fe7-96a500eacace", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.296782] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239762, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51124} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.297622] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.297622] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.297622] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fb4ebe4-6d07-4130-ba02-b32af583e87a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.306045] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 707.306045] env[68674]: value = "task-3239764" [ 707.306045] env[68674]: _type = "Task" [ 707.306045] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.315757] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.366315] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068357} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.366586] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.367378] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ddb047-462e-4e5a-a457-3ca2b9bbe8c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.387300] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.390191] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2f451ab-b4b4-468b-9b65-ed9ae2cf7c5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.409665] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 707.409665] env[68674]: value = "task-3239765" [ 707.409665] env[68674]: _type = "Task" [ 707.409665] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.425193] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239765, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.430200] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239761, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.591261] env[68674]: DEBUG oslo_concurrency.lockutils [req-20348597-5247-4cdd-ac9a-4c47e6f9c8de req-fc7ec7f7-0824-4d5f-8aae-37bad4554b57 service nova] Releasing lock "refresh_cache-1189fa93-608b-4684-a675-f1caf29a9f43" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.818650] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074635} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.818650] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.818650] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b73bf4d-d6f9-4479-9a3f-8a15a40c671d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.841258] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.844012] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a2a4ab7-4081-4da4-88a6-fd771529d007 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.863220] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 707.863220] env[68674]: value = "task-3239766" [ 707.863220] env[68674]: _type = "Task" [ 707.863220] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.870885] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239766, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.899030] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12568fe-9b9a-4e9b-a5f0-79bd5d62b7fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.907334] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13935777-146e-450b-9d41-5aff100b9763 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.952117] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81427df2-81f8-4380-bd7a-fc0bed861134 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.954234] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239765, 'name': ReconfigVM_Task, 'duration_secs': 0.331463} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.957686] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589/367461db-8bc4-4cf0-b7f6-f79ee2bf8589.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.958370] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239761, 'name': CreateVM_Task, 'duration_secs': 1.475261} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.958954] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e893d33-3c6e-474e-8154-277abe314ad9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.960648] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.963285] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.963490] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.963828] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.964546] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9febbfdb-dbf4-4692-92ed-da55cd8e8bf0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.967196] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee63241-2ba9-4724-8da6-2c310909d509 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.972800] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 707.972800] env[68674]: value = "task-3239767" [ 707.972800] env[68674]: _type = "Task" [ 707.972800] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.986043] env[68674]: DEBUG nova.compute.provider_tree [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.987817] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 707.987817] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522b91fd-604c-1b20-df94-01a418c5309a" [ 707.987817] env[68674]: _type = "Task" [ 707.987817] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.995384] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239767, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.001831] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522b91fd-604c-1b20-df94-01a418c5309a, 'name': SearchDatastore_Task, 'duration_secs': 0.011257} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.002769] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.003050] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.003308] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.003472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.003660] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.004197] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd2d8e6f-5f42-4d7d-a7fd-b2effa4edaba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.012155] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.012372] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.013121] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47fa2b81-80b5-4ef7-b8ac-2a4a612878ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.017922] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 708.017922] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52af6e21-77ad-2311-e296-0ac234644adb" [ 708.017922] env[68674]: _type = "Task" [ 708.017922] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.026080] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52af6e21-77ad-2311-e296-0ac234644adb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.373343] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239766, 'name': ReconfigVM_Task, 'duration_secs': 0.282637} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.373678] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Reconfigured VM instance instance-00000005 to attach disk [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.374300] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8143ad4c-b1f3-4dd0-a427-d638769f0149 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.381165] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 708.381165] env[68674]: value = "task-3239768" [ 708.381165] env[68674]: _type = "Task" [ 708.381165] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.392106] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239768, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.483816] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239767, 'name': Rename_Task, 'duration_secs': 0.141741} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.484165] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.484496] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8943d5c4-269a-4c6b-aa98-52ad68a84bfa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.490029] env[68674]: DEBUG nova.scheduler.client.report [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 708.494606] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 708.494606] env[68674]: value = "task-3239769" [ 708.494606] env[68674]: _type = "Task" [ 708.494606] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.502759] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.527880] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52af6e21-77ad-2311-e296-0ac234644adb, 'name': SearchDatastore_Task, 'duration_secs': 0.008419} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.528742] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ab1ae8-9d01-40ac-9f6c-f44c88e7c135 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.534076] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 708.534076] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5286074a-e2bf-79ce-950b-1c8bb7bba60b" [ 708.534076] env[68674]: _type = "Task" [ 708.534076] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.542186] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5286074a-e2bf-79ce-950b-1c8bb7bba60b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.891470] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239768, 'name': Rename_Task, 'duration_secs': 0.140201} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.891736] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.891943] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b3f2a1a-ee26-4045-ac85-ea4d97e49247 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.899512] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 708.899512] env[68674]: value = "task-3239770" [ 708.899512] env[68674]: _type = "Task" [ 708.899512] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.908847] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.996850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.996850] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 708.999527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.574s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.001423] env[68674]: INFO nova.compute.claims [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.015275] env[68674]: DEBUG oslo_vmware.api [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239769, 'name': PowerOnVM_Task, 'duration_secs': 0.453937} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.015540] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.015815] env[68674]: DEBUG nova.compute.manager [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.016742] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5dfa14-4d1c-413c-816e-a935c3e8e65a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.045941] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5286074a-e2bf-79ce-950b-1c8bb7bba60b, 'name': SearchDatastore_Task, 'duration_secs': 0.008753} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.047193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.047479] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.047744] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1948e1bf-ad38-46f6-837b-4b270e9cd4fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.056367] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 709.056367] env[68674]: value = "task-3239771" [ 709.056367] env[68674]: _type = "Task" [ 709.056367] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.064997] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239771, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.410529] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239770, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.517509] env[68674]: DEBUG nova.compute.utils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 709.518551] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 709.518905] env[68674]: DEBUG nova.network.neutron [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.533487] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.570804] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239771, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.589432] env[68674]: DEBUG nova.policy [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1df2b462632d47b0ba9c31ae7aececf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '435fbf1f847d4d36ba126fc8c49b59fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 709.905076] env[68674]: DEBUG nova.network.neutron [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Successfully created port: 328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.912071] env[68674]: DEBUG oslo_vmware.api [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239770, 'name': PowerOnVM_Task, 'duration_secs': 0.550363} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.912798] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.912798] env[68674]: DEBUG nova.compute.manager [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.913364] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2e2879-2b11-4a6d-a7c3-8edda142cbf3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.022247] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 710.067305] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239771, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605714} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.069714] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 710.069935] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.070347] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ea3dc82-c8bb-48a2-8058-781a441c0709 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.077769] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 710.077769] env[68674]: value = "task-3239772" [ 710.077769] env[68674]: _type = "Task" [ 710.077769] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.087341] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239772, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.289754] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.289754] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.289913] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.290069] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.290245] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.294287] env[68674]: INFO nova.compute.manager [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Terminating instance [ 710.440499] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.588162] env[68674]: DEBUG oslo_vmware.api [None 
req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.240333} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.590779] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.591682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a37067a-0741-4d73-aebf-7f6a6ac01795 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.613444] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.614630] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a2d8948-7f2a-4a71-ba1a-77d1119715d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.629456] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af026ff8-406c-4b95-a491-47b4d2d39055 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.638160] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2b0c10-cddd-4143-a6be-6ce8adb98de4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.641479] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 710.641479] env[68674]: value = "task-3239773" [ 710.641479] env[68674]: _type = "Task" [ 710.641479] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.671523] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1532d5c5-a8ae-4662-a738-f3052d541191 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.677269] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239773, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.682258] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895e9592-ff0a-4edc-b931-a63126815dc7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.695441] env[68674]: DEBUG nova.compute.provider_tree [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.799839] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "refresh_cache-367461db-8bc4-4cf0-b7f6-f79ee2bf8589" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.800041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquired lock "refresh_cache-367461db-8bc4-4cf0-b7f6-f79ee2bf8589" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.800220] env[68674]: DEBUG nova.network.neutron [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.033034] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 711.053672] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 711.053817] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.053918] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 711.054136] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.054258] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 711.054432] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 711.054702] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 711.054883] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 711.055047] env[68674]: DEBUG nova.virt.hardware [None 
req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 711.055225] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 711.055396] env[68674]: DEBUG nova.virt.hardware [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 711.056646] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93985b2a-eb23-4b40-921d-934020bfd10f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.064444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e27b20f-c455-46ff-9def-5d39eaa0633c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.151169] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239773, 'name': ReconfigVM_Task, 'duration_secs': 0.328825} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.151473] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Reconfigured VM instance instance-00000028 to attach disk [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.152121] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c531a65-cf73-4975-9c18-b9899337d914 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.161037] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 711.161037] env[68674]: value = "task-3239774" [ 711.161037] env[68674]: _type = "Task" [ 711.161037] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.171347] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239774, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.198412] env[68674]: DEBUG nova.scheduler.client.report [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 711.241528] env[68674]: INFO nova.compute.manager [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Rebuilding instance [ 711.286386] env[68674]: DEBUG nova.compute.manager [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.286386] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e4d444-7b02-4ec4-8b98-f0ecea880aa4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.441994] env[68674]: DEBUG nova.network.neutron [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.542136] env[68674]: DEBUG nova.network.neutron [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.677909] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239774, 'name': Rename_Task, 'duration_secs': 0.149003} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.678222] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.678481] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-249a4571-5662-4584-b2be-7bbebdba40d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.684953] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 711.684953] env[68674]: value = "task-3239775" [ 711.684953] env[68674]: _type = "Task" [ 711.684953] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.687754] env[68674]: DEBUG nova.compute.manager [req-9a8df433-a338-4016-b979-18be1a6b0f65 req-7bf224da-f9d6-4143-a999-4e54a751e938 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Received event network-vif-plugged-328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 711.687957] env[68674]: DEBUG oslo_concurrency.lockutils [req-9a8df433-a338-4016-b979-18be1a6b0f65 req-7bf224da-f9d6-4143-a999-4e54a751e938 service nova] Acquiring lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.688460] env[68674]: DEBUG oslo_concurrency.lockutils [req-9a8df433-a338-4016-b979-18be1a6b0f65 req-7bf224da-f9d6-4143-a999-4e54a751e938 service nova] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.688460] env[68674]: DEBUG oslo_concurrency.lockutils [req-9a8df433-a338-4016-b979-18be1a6b0f65 req-7bf224da-f9d6-4143-a999-4e54a751e938 service nova] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.688581] env[68674]: DEBUG nova.compute.manager [req-9a8df433-a338-4016-b979-18be1a6b0f65 req-7bf224da-f9d6-4143-a999-4e54a751e938 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] No waiting events found dispatching network-vif-plugged-328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 711.689016] env[68674]: WARNING nova.compute.manager [req-9a8df433-a338-4016-b979-18be1a6b0f65 req-7bf224da-f9d6-4143-a999-4e54a751e938 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Received unexpected event network-vif-plugged-328800af-995b-4980-a8a3-4a51aa3c17e5 for instance with vm_state building and task_state spawning. 
[ 711.698084] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239775, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.703250] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.703785] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 711.706383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 41.152s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.806549] env[68674]: DEBUG nova.network.neutron [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Successfully updated port: 328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 712.045856] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Releasing lock "refresh_cache-367461db-8bc4-4cf0-b7f6-f79ee2bf8589" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.045856] env[68674]: DEBUG nova.compute.manager [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 712.046261] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.046913] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6543531d-5bb9-4ff9-b26f-98f6266b629a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.054833] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 712.055159] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae3835c8-1867-4534-9d5c-761ca27f0f1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.062721] env[68674]: DEBUG oslo_vmware.api [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 712.062721] env[68674]: value = "task-3239776" [ 712.062721] env[68674]: _type = "Task" [ 712.062721] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.071266] env[68674]: DEBUG oslo_vmware.api [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.197358] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239775, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.210190] env[68674]: DEBUG nova.compute.utils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 712.214226] env[68674]: INFO nova.compute.claims [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.218844] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 712.219078] env[68674]: DEBUG nova.network.neutron [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 712.276325] env[68674]: DEBUG nova.policy [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8942b53e01ca49c38b9c8be52bfa02fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d1b163422ef4e798ffc2ef3c5c7d2ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 712.306402] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 712.306798] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ee40eaf-9415-4977-a821-e0374af23387 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.308938] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.309118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.309245] env[68674]: DEBUG nova.network.neutron [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.317128] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 712.317128] env[68674]: value = "task-3239777" [ 712.317128] env[68674]: _type = "Task" [ 712.317128] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.329880] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239777, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.573159] env[68674]: DEBUG oslo_vmware.api [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239776, 'name': PowerOffVM_Task, 'duration_secs': 0.125141} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.573673] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 712.574104] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 712.574504] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d949ac9d-e4d7-4aa8-a453-90b86ed6e94f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.601547] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 712.601547] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 712.601547] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Deleting the datastore file [datastore2] 367461db-8bc4-4cf0-b7f6-f79ee2bf8589 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 712.602064] env[68674]: DEBUG nova.network.neutron [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Successfully created port: 1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.603967] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cee6eb6b-150f-4b6c-b4c1-81931fef365d {{(pid=68674) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.611155] env[68674]: DEBUG oslo_vmware.api [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for the task: (returnval){ [ 712.611155] env[68674]: value = "task-3239779" [ 712.611155] env[68674]: _type = "Task" [ 712.611155] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.620720] env[68674]: DEBUG oslo_vmware.api [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239779, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.697681] env[68674]: DEBUG oslo_vmware.api [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239775, 'name': PowerOnVM_Task, 'duration_secs': 0.599707} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.697988] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 712.698228] env[68674]: INFO nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Took 9.57 seconds to spawn the instance on the hypervisor. [ 712.698413] env[68674]: DEBUG nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.699301] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e9ff16-07e0-4f3d-ab55-395248199422 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.722622] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 712.724115] env[68674]: INFO nova.compute.resource_tracker [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating resource usage from migration 21a51122-368f-4c4d-9f78-bddd3b48ff9c [ 712.827290] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239777, 'name': PowerOffVM_Task, 'duration_secs': 0.190994} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.830808] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 712.831099] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.832180] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7122a71e-f541-4d01-a356-bd28fffdfc4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.838823] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 712.839432] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e15ba21-8779-4fa1-b443-37e064bc713f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.861137] env[68674]: DEBUG nova.network.neutron [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.907183] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 712.907493] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 712.907715] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleting the datastore file [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 712.908168] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d757279a-127f-42d6-880e-abeb2d6f9af3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.914022] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 712.914022] env[68674]: value = "task-3239781" [ 712.914022] env[68674]: _type = "Task" [ 712.914022] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.925035] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.125296] env[68674]: DEBUG oslo_vmware.api [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Task: {'id': task-3239779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095646} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.126338] env[68674]: DEBUG nova.network.neutron [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updating instance_info_cache with network_info: [{"id": "328800af-995b-4980-a8a3-4a51aa3c17e5", "address": "fa:16:3e:6a:2a:04", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328800af-99", "ovs_interfaceid": "328800af-995b-4980-a8a3-4a51aa3c17e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.127622] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 713.128232] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 713.128232] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.128377] env[68674]: INFO nova.compute.manager [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Took 1.08 seconds to destroy the instance on the hypervisor. [ 713.128661] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 713.129126] env[68674]: DEBUG nova.compute.manager [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 713.129366] env[68674]: DEBUG nova.network.neutron [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.145547] env[68674]: DEBUG nova.network.neutron [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.220816] env[68674]: INFO nova.compute.manager [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Took 47.87 seconds to build instance. [ 713.304288] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa458418-4b9c-463c-be70-adf4bdbbf0a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.314217] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603e8294-b7a3-40b1-98bb-ec60bbf80c90 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.349132] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbaa8ed-15dd-4be3-9a46-593ded76138d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.357076] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3ab974-8979-422c-8862-59b511de893a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.370955] env[68674]: DEBUG nova.compute.provider_tree [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.423938] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200282} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.424231] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 713.424418] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 713.424595] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.629738] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.631166] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Instance network_info: |[{"id": "328800af-995b-4980-a8a3-4a51aa3c17e5", "address": "fa:16:3e:6a:2a:04", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328800af-99", "ovs_interfaceid": "328800af-995b-4980-a8a3-4a51aa3c17e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 713.631261] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:2a:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '328800af-995b-4980-a8a3-4a51aa3c17e5', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 713.638022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Creating folder: Project (435fbf1f847d4d36ba126fc8c49b59fd). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.638326] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5065516-1783-439b-bfea-c8fd8fe1950f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.650274] env[68674]: DEBUG nova.network.neutron [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.652426] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Created folder: Project (435fbf1f847d4d36ba126fc8c49b59fd) in parent group-v647377. [ 713.652655] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Creating folder: Instances. Parent ref: group-v647497. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.653124] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d91f7774-66c3-4d25-8e02-f0883e35c1b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.664229] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Created folder: Instances in parent group-v647497. [ 713.664471] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 713.664668] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 713.664866] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5636617-318e-483d-87d9-43e30ba8c9f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.685473] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 713.685473] env[68674]: value = "task-3239784" [ 713.685473] env[68674]: _type = "Task" [ 713.685473] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.694952] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239784, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.722349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ccaaf4cb-1f53-450f-89ea-344dfcef8418 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 86.024s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.726071] env[68674]: DEBUG nova.compute.manager [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Received event network-changed-328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 713.726349] env[68674]: DEBUG nova.compute.manager [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Refreshing instance network info cache due to event network-changed-328800af-995b-4980-a8a3-4a51aa3c17e5. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 713.726629] env[68674]: DEBUG oslo_concurrency.lockutils [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] Acquiring lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.726818] env[68674]: DEBUG oslo_concurrency.lockutils [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] Acquired lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.727041] env[68674]: DEBUG nova.network.neutron [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Refreshing network info cache for port 328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.734340] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Start spawning the instance on the hypervisor. 
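The oslo_concurrency.lockutils DEBUG records above come from two standard patterns in that library. A minimal, self-contained sketch of both follows; the lock names mirror the ones in the log, and the function bodies are placeholders rather than Nova's real code.

    from oslo_concurrency import lockutils

    # Sketch only: lock names mirror the log, bodies are placeholders.
    # 1) The lock() context manager produces the plain "Acquiring lock" /
    #    "Acquired lock" / "Releasing lock" lines (lockutils.py:313/316/334).
    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # cache-refresh work happens while the lock is held

    # 2) The synchronized() decorator produces the lines that name the caller,
    #    'acquired by "..." :: waited Ns' and '"released" ... :: held Ns'
    #    (lockutils.py:405/410/424).
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # placeholder for a resource-tracker style claim

    refresh_instance_cache('3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca')
    claim_resources()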
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 713.757017] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=<?>,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-03T08:04:52Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 713.757017] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.757017] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 713.757229] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.757229] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 713.757560] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 713.759635] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 713.759635] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 713.759635] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 713.759635] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 713.759635] env[68674]: DEBUG nova.virt.hardware [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 713.760648] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c133c1e-567e-4cdd-b653-2b76f0509388 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.769851] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4f8550-34e0-4a56-b249-40dc4a965099 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.873945] env[68674]: DEBUG nova.scheduler.client.report [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.154715] env[68674]: INFO nova.compute.manager [-] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Took 1.03 seconds to deallocate network for instance. [ 714.196843] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239784, 'name': CreateVM_Task, 'duration_secs': 0.486409} completed successfully. 
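For reference, an inventory report like the one above for provider ade3f042-7427-494b-9654-0b65e074850c translates into schedulable capacity using the usual placement formula, capacity = (total - reserved) * allocation_ratio per resource class. A short illustration with the logged values (this is an editorial sketch, not code from the service):

    # Capacity per resource class = (total - reserved) * allocation_ratio,
    # using the inventory values reported above for provider ade3f042-....
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400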
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.197022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 714.197688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.197855] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.198198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 714.198449] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7755e68c-18aa-4a93-a4ae-c090ad398352 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.203353] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 714.203353] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5296db27-b8ec-94ea-dca4-9134ba105178" [ 714.203353] env[68674]: _type = "Task" [ 714.203353] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.211959] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5296db27-b8ec-94ea-dca4-9134ba105178, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.228983] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.322494] env[68674]: INFO nova.compute.manager [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Rebuilding instance [ 714.380134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.674s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.380389] env[68674]: INFO nova.compute.manager [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Migrating [ 714.380639] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.380785] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.389094] env[68674]: DEBUG nova.compute.manager [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 714.389094] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.909s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.389094] env[68674]: INFO nova.compute.claims [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.389094] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee24e184-8b11-4261-8caa-a9593fa98e84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.396399] env[68674]: INFO nova.compute.rpcapi [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 714.397065] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] 
Releasing lock "compute-rpcapi-router" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.425231] env[68674]: DEBUG nova.network.neutron [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Successfully updated port: 1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 714.477942] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 714.481207] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.481207] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 714.481207] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.481207] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 714.481207] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 714.481973] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 714.481973] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 714.481973] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 714.481973] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 714.481973] env[68674]: DEBUG nova.virt.hardware [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 714.482482] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cc75b1-15bc-47aa-b685-fa6e5c485f89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.491514] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca59c897-c613-402d-b5ca-1dbe29b8320d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.507998] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:f2:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a459b31b-865e-45d7-a62b-b7c95eb50c15', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.516026] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
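The nova.virt.hardware records above enumerate CPU topologies for a 1-vCPU flavor under the default limits of 65536 sockets, cores and threads, and arrive at the single topology 1:1:1. The toy enumeration below (not Nova's implementation) shows why only one factorization survives for one vCPU:

    # Toy version of the enumeration, not Nova's code: list (sockets, cores,
    # threads) triples whose product equals the vCPU count, within the limits.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)], the single logged topology
    print(possible_topologies(4))   # several candidates once vcpus > 1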
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.518727] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 714.518949] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f906167-13dd-44a5-a03c-5ee8e0286ceb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.539547] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.539547] env[68674]: value = "task-3239785" [ 714.539547] env[68674]: _type = "Task" [ 714.539547] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.547407] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239785, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.564570] env[68674]: DEBUG nova.network.neutron [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updated VIF entry in instance network info cache for port 328800af-995b-4980-a8a3-4a51aa3c17e5. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.564927] env[68674]: DEBUG nova.network.neutron [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updating instance_info_cache with network_info: [{"id": "328800af-995b-4980-a8a3-4a51aa3c17e5", "address": "fa:16:3e:6a:2a:04", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328800af-99", "ovs_interfaceid": "328800af-995b-4980-a8a3-4a51aa3c17e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.661928] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.714288] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 
tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5296db27-b8ec-94ea-dca4-9134ba105178, 'name': SearchDatastore_Task, 'duration_secs': 0.009913} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.714608] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.714840] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 714.715099] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.715624] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.715624] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 714.715742] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5793e264-740e-49a9-96f1-d4b0e82a4b3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.724704] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 714.724941] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Folder [datastore1] devstack-image-cache_base created. 
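Each "Invoking <ManagedObject>.<Method> with opID=oslo.vmware-..." record above is one SOAP round-trip issued through an oslo.vmware session. A minimal sketch of how such calls are made against the library's public API follows; the vCenter address and credentials are placeholders (in the real service they come from nova.conf's [vmware] section), and the constructor arguments shown are the session's host, username, password, retry count and task poll interval.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; positional args are host, user,
    # password, API retry count and task poll interval.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret', 10, 0.5)

    # vim_util.get_object_property is served by a
    # PropertyCollector.RetrievePropertiesEx call, the same kind of request
    # logged repeatedly above.
    version = session.invoke_api(
        vim_util, 'get_object_property', session.vim,
        session.vim.service_content.about, 'version')
    print(version)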
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 714.725742] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad757fbd-05ee-40e5-b5eb-0de907f49260 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.731153] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 714.731153] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fe341-65e1-39f1-4a97-cd7c1f31f302" [ 714.731153] env[68674]: _type = "Task" [ 714.731153] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.740965] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fe341-65e1-39f1-4a97-cd7c1f31f302, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.752901] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.922379] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.922712] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.922957] env[68674]: DEBUG nova.network.neutron [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.930442] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "refresh_cache-1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.930626] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired lock "refresh_cache-1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" 
{{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.930814] env[68674]: DEBUG nova.network.neutron [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.049989] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239785, 'name': CreateVM_Task, 'duration_secs': 0.448528} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.050194] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 715.050930] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.051152] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.051739] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 715.052090] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62ca7d93-f8c2-476a-8edb-61a73a65fe05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.059696] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 715.059696] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d51386-9ed7-e772-f63b-764d569444ec" [ 715.059696] env[68674]: _type = "Task" [ 715.059696] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.069253] env[68674]: DEBUG oslo_concurrency.lockutils [req-eff22bf3-af29-4717-b48f-116fedc5b5e0 req-236159d8-2fd1-459c-aaa0-88072de75260 service nova] Releasing lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.069646] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d51386-9ed7-e772-f63b-764d569444ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.241878] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fe341-65e1-39f1-4a97-cd7c1f31f302, 'name': SearchDatastore_Task, 'duration_secs': 0.009677} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.242644] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ede18f4-d1fb-4abf-8082-b2c219ce824d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.247622] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 715.247622] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e9bcd9-0953-2917-e3eb-a0447ac8cac1" [ 715.247622] env[68674]: _type = "Task" [ 715.247622] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.256827] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e9bcd9-0953-2917-e3eb-a0447ac8cac1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.437199] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 715.437440] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c214d49-e13c-4bd1-a332-a4887f54bf2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.444490] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 715.444490] env[68674]: value = "task-3239786" [ 715.444490] env[68674]: _type = "Task" [ 715.444490] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.455388] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239786, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.511888] env[68674]: DEBUG nova.network.neutron [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.577313] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d51386-9ed7-e772-f63b-764d569444ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010176} completed successfully. 
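The "_poll_task ... progress is N%" records are emitted while oslo.vmware waits on a vCenter task; the general-purpose poll-until-done primitive underneath is oslo.service's FixedIntervalLoopingCall. A self-contained example of that primitive, with a fake task standing in for a real vCenter task:

    from oslo_service import loopingcall

    # Fake task state; a real caller would poll TaskInfo from vCenter instead.
    progress = {'value': 0}

    def poll_fake_task():
        progress['value'] += 50
        if progress['value'] >= 100:
            # Raising LoopingCallDone stops the loop; its retvalue is what
            # wait() below returns.
            raise loopingcall.LoopingCallDone(retvalue='success')

    timer = loopingcall.FixedIntervalLoopingCall(poll_fake_task)
    result = timer.start(interval=0.5).wait()   # blocks until the task "completes"
    print(result)   # success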
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.578724] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.578989] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.579227] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.579366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.579536] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.580039] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1177c35-f497-4a9d-b56b-2db3f0eda8a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.590334] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.590334] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.596749] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66c13bb1-4fe6-4f74-bf59-5cd62f9ee66e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.603992] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 715.603992] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b4deee-b758-b943-0935-7f72f8ce9672" [ 715.603992] env[68674]: _type = "Task" [ 715.603992] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.611629] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b4deee-b758-b943-0935-7f72f8ce9672, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.663317] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.666459] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.764245] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e9bcd9-0953-2917-e3eb-a0447ac8cac1, 'name': SearchDatastore_Task, 'duration_secs': 0.010066} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.767387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.767720] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca/3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 715.768814] env[68674]: DEBUG nova.network.neutron [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Updating instance_info_cache with network_info: [{"id": "1ac32e60-87ff-4d72-ad65-cd7f723a0bfa", "address": "fa:16:3e:d9:4c:62", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac32e60-87", "ovs_interfaceid": "1ac32e60-87ff-4d72-ad65-cd7f723a0bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.772885] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ddb3b51-0770-47e4-bb55-793e09e8d0f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.782094] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 715.782094] env[68674]: value = "task-3239787" [ 715.782094] env[68674]: _type = "Task" [ 715.782094] env[68674]: } to complete. 
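The CopyVirtualDisk_Task above copies the cached image VMDK into the instance directory and is then polled to completion. A sketch of that invoke-then-wait pattern against oslo.vmware's public API, assuming an already established session and a datacenter managed-object reference (both passed in here; the datastore paths mirror the ones in the log):

    # Sketch only: `session` is an established oslo.vmware VMwareAPISession,
    # `dc_ref` the datacenter managed-object reference.
    def copy_cached_image(session, dc_ref):
        vim = session.vim
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task', vim.service_content.virtualDiskManager,
            sourceName='[datastore1] devstack-image-cache_base/'
                       'b84d9354-ef6b-46ca-9dae-6549fa89bbea/'
                       'b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk',
            sourceDatacenter=dc_ref,
            destName='[datastore1] 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca/'
                     '3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca.vmdk',
            destDatacenter=dc_ref)
        # wait_for_task drives the "_poll_task ... progress is N%" records
        # until the task reaches a terminal state, then returns its info.
        return session.wait_for_task(task)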
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.796379] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239787, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.809115] env[68674]: DEBUG nova.network.neutron [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance_info_cache with network_info: [{"id": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "address": "fa:16:3e:dc:4d:99", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a3099e-55", "ovs_interfaceid": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.955200] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.993469] env[68674]: DEBUG nova.compute.manager [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Received event network-vif-plugged-1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 715.994156] env[68674]: DEBUG oslo_concurrency.lockutils [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] Acquiring lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.994156] env[68674]: DEBUG oslo_concurrency.lockutils [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.994457] env[68674]: DEBUG oslo_concurrency.lockutils [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.994684] env[68674]: DEBUG nova.compute.manager [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] No waiting events found dispatching network-vif-plugged-1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 715.994928] env[68674]: WARNING nova.compute.manager [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Received unexpected event network-vif-plugged-1ac32e60-87ff-4d72-ad65-cd7f723a0bfa for instance with vm_state building and task_state spawning. [ 715.995178] env[68674]: DEBUG nova.compute.manager [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Received event network-changed-1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 715.995412] env[68674]: DEBUG nova.compute.manager [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Refreshing instance network info cache due to event network-changed-1ac32e60-87ff-4d72-ad65-cd7f723a0bfa. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 715.995680] env[68674]: DEBUG oslo_concurrency.lockutils [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] Acquiring lock "refresh_cache-1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.010393] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1496642-77d3-4c10-99b3-52456e96abe5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.018408] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163da034-df4a-48a1-a37b-05e7f9322cce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.052256] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff37960-0e00-4402-b2fe-020411afafa9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.060478] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73ec7ee-ec1a-4ffa-a92c-132afdc01416 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.074734] env[68674]: DEBUG nova.compute.provider_tree [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.114813] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b4deee-b758-b943-0935-7f72f8ce9672, 'name': SearchDatastore_Task, 'duration_secs': 0.009671} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.115798] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5f0645-b6b0-4937-a525-46c31b2c01fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.121975] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 716.121975] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b07b7d-e9fb-add8-a8ef-1ef2b0336930" [ 716.121975] env[68674]: _type = "Task" [ 716.121975] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.130759] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b07b7d-e9fb-add8-a8ef-1ef2b0336930, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.170239] env[68674]: DEBUG nova.compute.utils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 716.277702] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Releasing lock "refresh_cache-1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.278140] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Instance network_info: |[{"id": "1ac32e60-87ff-4d72-ad65-cd7f723a0bfa", "address": "fa:16:3e:d9:4c:62", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac32e60-87", "ovs_interfaceid": "1ac32e60-87ff-4d72-ad65-cd7f723a0bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 716.279240] env[68674]: DEBUG oslo_concurrency.lockutils [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] Acquired lock "refresh_cache-1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.279617] env[68674]: DEBUG nova.network.neutron [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Refreshing network info cache for port 1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.282877] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:4c:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'1ac32e60-87ff-4d72-ad65-cd7f723a0bfa', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.295804] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.300945] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 716.306933] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f6b5012-715f-4e7c-9de3-a831d40e23f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.326934] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.334922] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239787, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.336738] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.336738] env[68674]: value = "task-3239788" [ 716.336738] env[68674]: _type = "Task" [ 716.336738] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.345909] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239788, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.456409] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239786, 'name': PowerOffVM_Task, 'duration_secs': 0.737915} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.456672] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.456910] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.457688] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47105b4d-4942-4346-8bd4-1a3594831ad9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.463930] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.464180] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-664f3a32-a78c-4ad6-8413-f29780fee00a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.537407] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 716.537627] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 716.537810] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.538071] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95b29042-0d76-4087-ab1f-05494449a57e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.542528] env[68674]: DEBUG nova.network.neutron [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Updated VIF entry in instance network info cache for port 1ac32e60-87ff-4d72-ad65-cd7f723a0bfa. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.542855] env[68674]: DEBUG nova.network.neutron [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Updating instance_info_cache with network_info: [{"id": "1ac32e60-87ff-4d72-ad65-cd7f723a0bfa", "address": "fa:16:3e:d9:4c:62", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac32e60-87", "ovs_interfaceid": "1ac32e60-87ff-4d72-ad65-cd7f723a0bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.547845] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 716.547845] env[68674]: value = "task-3239790" [ 716.547845] env[68674]: _type = "Task" [ 716.547845] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.555971] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239790, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.578147] env[68674]: DEBUG nova.scheduler.client.report [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 716.632202] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b07b7d-e9fb-add8-a8ef-1ef2b0336930, 'name': SearchDatastore_Task, 'duration_secs': 0.01886} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.632372] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.632566] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.632810] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e329115-80b7-4f39-a04d-ec9e57ae8828 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.639315] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 716.639315] env[68674]: value = "task-3239791" [ 716.639315] env[68674]: _type = "Task" [ 716.639315] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.646719] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.673434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.809919] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662474} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.810095] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca/3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 716.810163] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 716.810843] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8017a410-408b-4a15-9368-e3ea605f1f91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.817316] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 716.817316] env[68674]: value = "task-3239792" [ 716.817316] env[68674]: _type = "Task" [ 716.817316] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.825707] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239792, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.847014] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239788, 'name': CreateVM_Task, 'duration_secs': 0.47982} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.847367] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 716.848053] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.848189] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.848831] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 716.849247] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ba932c-8765-4d70-9890-e5b093e92587 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.856066] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 716.856066] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ff441d-366f-b25c-c49e-a3cd1be739b2" [ 716.856066] env[68674]: _type = "Task" [ 716.856066] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.861790] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ff441d-366f-b25c-c49e-a3cd1be739b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.046566] env[68674]: DEBUG oslo_concurrency.lockutils [req-59a890b6-abb9-403b-a76e-86f1e849bb6d req-e659a208-64e4-43b6-b2a9-6c04fea9677b service nova] Releasing lock "refresh_cache-1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.061335] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239771} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.061529] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.061654] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 717.061834] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.083120] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.083794] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 717.086537] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 41.355s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.153517] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239791, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.328902] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239792, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062975} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.330201] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 717.331472] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f573ae61-3515-41db-a75a-612f8f59e9e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.358530] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca/3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.359647] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f746d13b-7173-483c-bf03-0cb2e6d4ec0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.384679] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ff441d-366f-b25c-c49e-a3cd1be739b2, 'name': SearchDatastore_Task, 'duration_secs': 0.009355} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.385905] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.387028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.387028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.387028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.387028] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.387028] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 717.387028] env[68674]: value = "task-3239793" [ 717.387028] env[68674]: _type = "Task" [ 717.387325] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.387325] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c7cad08-ef76-4de6-8770-7bcaa871bccf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.397653] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239793, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.398721] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.398957] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 717.399654] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-971e4eaf-0fd3-4996-b467-dd6b9dc206c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.405350] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 717.405350] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ac2f17-8bfe-cd26-66af-98540dc4967c" [ 717.405350] env[68674]: _type = "Task" [ 717.405350] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.414104] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ac2f17-8bfe-cd26-66af-98540dc4967c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.590566] env[68674]: DEBUG nova.compute.utils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 717.607134] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 717.609810] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 717.609980] env[68674]: DEBUG nova.network.neutron [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 717.649865] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239791, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539327} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.651439] env[68674]: DEBUG nova.policy [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '873bf773b9d64551a14458b3748583a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c2671e9238b4d00b011b7e521ca73bb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 717.652953] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.653196] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.653486] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d32bd42-d482-4708-ad0a-4522d4f3ddca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.659557] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 717.659557] env[68674]: value = "task-3239794" [ 717.659557] env[68674]: _type = "Task" [ 717.659557] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.669741] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239794, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.738756] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.739128] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.739393] env[68674]: INFO nova.compute.manager [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Attaching volume 600a9d1f-0ffd-4d51-89b0-9bdf3c055337 to /dev/sdb [ 717.775094] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f79e7af-273f-4bca-ae29-337e098de925 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.783592] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6193bde1-64fb-44a6-b417-fc3f310f7ae0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.797651] env[68674]: DEBUG nova.virt.block_device [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updating existing volume attachment record: bb969a55-0f3e-4529-8e56-1b4b9937feb3 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 717.842637] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95b035b-b8de-4a22-b888-d05ef321e57a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.864283] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 717.904154] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239793, 'name': ReconfigVM_Task, 'duration_secs': 0.281108} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.904154] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca/3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 717.904465] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08a5239f-dbab-4052-97b5-2417f3a36878 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.920387] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ac2f17-8bfe-cd26-66af-98540dc4967c, 'name': SearchDatastore_Task, 'duration_secs': 0.008533} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.922918] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 717.922918] env[68674]: value = "task-3239795" [ 717.922918] env[68674]: _type = "Task" [ 717.922918] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.923298] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ac5cd91-71f6-4455-924a-b01667bdf5e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.936238] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239795, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.937812] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 717.937812] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5296fcd2-18a8-36e9-29a1-8589c2bc0e93" [ 717.937812] env[68674]: _type = "Task" [ 717.937812] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.949858] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5296fcd2-18a8-36e9-29a1-8589c2bc0e93, 'name': SearchDatastore_Task, 'duration_secs': 0.009925} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.949858] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.949858] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49/1b405b1f-ee1f-4e6e-9355-de8b5c26ab49.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 717.949858] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7b8eabf-8cfa-43ec-b095-7bb896c9755b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.956762] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 717.956762] env[68674]: value = "task-3239797" [ 717.956762] env[68674]: _type = "Task" [ 717.956762] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.967610] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239797, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.025701] env[68674]: DEBUG nova.network.neutron [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Successfully created port: 1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.099436] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 718.099738] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.099909] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 718.100109] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.100303] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 718.100408] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 718.100716] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 718.100918] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 718.101167] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 718.101348] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 718.101542] env[68674]: DEBUG nova.virt.hardware [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 718.102444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1eeee5-2e15-484e-856d-80a034c19b4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.106932] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Applying migration context for instance 77fa5a89-961b-4c84-a75e-a5be0253677e as it has an incoming, in-progress migration 21a51122-368f-4c4d-9f78-bddd3b48ff9c. 
Migration status is migrating {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 718.109148] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating resource usage from migration 21a51122-368f-4c4d-9f78-bddd3b48ff9c [ 718.122727] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203ad7c5-f488-41a5-aca7-d9402539ca3d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.137747] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:2b:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61791433-aab7-4244-91a2-6caef49a0717', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.145172] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.146423] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147033] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 02d4aee3-7267-4658-a277-8a9a00dd9f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147033] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e75d2bc7-f356-4443-9641-d9ebf35843cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147033] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 160d9aa2-048d-45a2-ab55-581c8721ac3b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 718.147033] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 503e9328-bbd8-414f-8bea-250ed8247d67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147236] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147236] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 357b515d-ef37-4688-969e-f894be30edb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147303] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 50bb7509-b7e9-4dc3-9746-acd46010cc26 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 718.147475] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance fa89e0b5-590d-43fb-bb11-02f8fdee0c2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147515] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3463e09e-dc2f-432c-9eff-8192c2616240 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147641] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 627fb348-1749-4480-97b9-b479a182d4ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.147758] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 718.147890] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 275cdfcc-06f0-4c29-b18b-55cde38480a3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 718.147992] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 045e54ff-9e2c-4b04-afac-34cb6580cb2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.148119] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 23891bad-1b63-4237-9243-78954cf67d52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.148233] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 6278d756-139c-4fcd-bf31-304c978d6682 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.148357] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance d167585b-11f4-462c-b12e-c6a440c1476a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 718.149046] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0f618d12-dc7b-4739-8ace-9453a7175d75 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 718.149046] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 718.149046] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 505b0352-39ab-4841-8766-14626af2b13e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.149046] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 367461db-8bc4-4cf0-b7f6-f79ee2bf8589 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 718.149046] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1189fa93-608b-4684-a675-f1caf29a9f43 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.149326] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance d88ccf9b-7432-4be0-82f7-b2a9155f7d86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.149326] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.149326] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.149412] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3c8459db-cc54-4644-8e4c-83c87017a186 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.152027] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 718.152027] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a07afe0-50b0-4f02-bfef-d7f4ee444fb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.181285] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063312} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.182502] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 718.182846] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.182846] env[68674]: value = "task-3239799" [ 718.182846] env[68674]: _type = "Task" [ 718.182846] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.183409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d69018-6fb7-452a-8b39-78e094ce8722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.213168] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.213168] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1636e4b-d2fa-47a9-95c3-211f61b4b0fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.232203] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 718.232203] env[68674]: value = "task-3239801" [ 718.232203] env[68674]: _type = "Task" [ 718.232203] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.241007] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.372450] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 718.372834] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-995e8015-1ccc-4b77-bf44-56707891319f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.380232] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 718.380232] env[68674]: value = "task-3239802" [ 718.380232] env[68674]: _type = "Task" [ 718.380232] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.389114] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239802, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.439314] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239795, 'name': Rename_Task, 'duration_secs': 0.166653} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.439314] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.439314] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96be13e6-41dc-4ead-a9e5-8df439109072 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.446253] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 718.446253] env[68674]: value = "task-3239803" [ 718.446253] env[68674]: _type = "Task" [ 718.446253] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.456386] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.466215] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.622760] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 718.648437] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 718.648716] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.648875] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 718.649072] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.649227] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 718.649378] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 718.649668] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 718.649887] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 718.650113] env[68674]: DEBUG 
nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 718.650331] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 718.650546] env[68674]: DEBUG nova.virt.hardware [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 718.651559] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7576f19-547c-40b0-a642-06da56c2866f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.654803] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0eaf7d72-755b-4977-8f71-7d53ad1cf573 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 718.662090] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10e55fd-72fc-4cc4-b4b2-37c0d91a4301 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.694250] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239799, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.742901] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239801, 'name': ReconfigVM_Task, 'duration_secs': 0.31401} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.743263] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Reconfigured VM instance instance-00000005 to attach disk [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82/baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.744034] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cf76290-3f03-4fee-9ffc-181c0c2f28b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.750291] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 718.750291] env[68674]: value = "task-3239804" [ 718.750291] env[68674]: _type = "Task" [ 718.750291] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.758215] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239804, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.892980] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239802, 'name': PowerOffVM_Task, 'duration_secs': 0.231301} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.893288] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 718.893539] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 718.959574] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239803, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.969255] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239797, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.157874] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 6803af03-b1d5-47e6-9471-5213469e4103 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.200531] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239799, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.261480] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239804, 'name': Rename_Task, 'duration_secs': 0.268415} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.261769] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.262008] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79cf155a-7d08-4220-b17d-16830d4e60b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.267894] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 719.267894] env[68674]: value = "task-3239805" [ 719.267894] env[68674]: _type = "Task" [ 719.267894] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.275748] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239805, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.401183] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.401483] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.401665] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.401849] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.401999] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.402228] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.402439] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.402611] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.402807] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 719.402970] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.403171] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.408303] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e217729d-343a-463f-930e-54e4e332a0e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.423207] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 719.423207] env[68674]: value = "task-3239806" [ 719.423207] env[68674]: _type = "Task" [ 719.423207] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.431840] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239806, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.454786] env[68674]: DEBUG oslo_vmware.api [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3239803, 'name': PowerOnVM_Task, 'duration_secs': 0.547798} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.455070] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 719.455350] env[68674]: INFO nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Took 8.42 seconds to spawn the instance on the hypervisor. 
[ 719.455507] env[68674]: DEBUG nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.456495] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191b0eaf-cc2f-43a1-8127-2b0bf6c983d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.468356] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239797, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.258364} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.470043] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49/1b405b1f-ee1f-4e6e-9355-de8b5c26ab49.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.470264] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.473725] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40ccb625-386c-4f63-917b-cae7cd3af97d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.481219] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 719.481219] env[68674]: value = "task-3239807" [ 719.481219] env[68674]: _type = "Task" [ 719.481219] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.490816] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239807, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.496329] env[68674]: DEBUG nova.compute.manager [req-30d2543a-879f-4f21-8afe-aa6227fd4cdd req-da2ca654-07ee-46f3-b8af-1d68a26b8861 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Received event network-vif-plugged-1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.496329] env[68674]: DEBUG oslo_concurrency.lockutils [req-30d2543a-879f-4f21-8afe-aa6227fd4cdd req-da2ca654-07ee-46f3-b8af-1d68a26b8861 service nova] Acquiring lock "3c8459db-cc54-4644-8e4c-83c87017a186-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.496329] env[68674]: DEBUG oslo_concurrency.lockutils [req-30d2543a-879f-4f21-8afe-aa6227fd4cdd req-da2ca654-07ee-46f3-b8af-1d68a26b8861 service nova] Lock "3c8459db-cc54-4644-8e4c-83c87017a186-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.496329] env[68674]: DEBUG oslo_concurrency.lockutils [req-30d2543a-879f-4f21-8afe-aa6227fd4cdd req-da2ca654-07ee-46f3-b8af-1d68a26b8861 service nova] Lock "3c8459db-cc54-4644-8e4c-83c87017a186-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.496613] env[68674]: DEBUG nova.compute.manager [req-30d2543a-879f-4f21-8afe-aa6227fd4cdd req-da2ca654-07ee-46f3-b8af-1d68a26b8861 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] No waiting events found dispatching network-vif-plugged-1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 719.496659] env[68674]: WARNING nova.compute.manager [req-30d2543a-879f-4f21-8afe-aa6227fd4cdd req-da2ca654-07ee-46f3-b8af-1d68a26b8861 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Received unexpected event network-vif-plugged-1dd86562-12e7-45c7-9bc1-e9b4ed28e43e for instance with vm_state building and task_state spawning. [ 719.616018] env[68674]: DEBUG nova.network.neutron [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Successfully updated port: 1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.661723] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.696431] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239799, 'name': CreateVM_Task, 'duration_secs': 1.204119} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.696608] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 719.697680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.697680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.697843] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 719.698097] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83057f2f-c31b-4999-87a9-e89e22b328b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.702767] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 719.702767] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528f90a4-1e6b-449f-3653-1c6246e31d24" [ 719.702767] env[68674]: _type = "Task" [ 719.702767] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.710902] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528f90a4-1e6b-449f-3653-1c6246e31d24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.777583] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239805, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.934971] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239806, 'name': ReconfigVM_Task, 'duration_secs': 0.265507} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.935350] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 719.983820] env[68674]: INFO nova.compute.manager [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Took 50.12 seconds to build instance. [ 719.994716] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064912} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.995029] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.995780] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69160d8-9f62-4d97-a02c-d953cf9f5622 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.022568] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49/1b405b1f-ee1f-4e6e-9355-de8b5c26ab49.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.023787] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e24e878-32e2-401f-af07-f0d5c89994d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.043781] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 720.043781] env[68674]: value = "task-3239808" [ 720.043781] env[68674]: _type = "Task" [ 720.043781] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.052475] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239808, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.119324] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "refresh_cache-3c8459db-cc54-4644-8e4c-83c87017a186" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.119491] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquired lock "refresh_cache-3c8459db-cc54-4644-8e4c-83c87017a186" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.119696] env[68674]: DEBUG nova.network.neutron [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.165546] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1699f556-d451-40e3-a213-7edb753b03f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 720.213509] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528f90a4-1e6b-449f-3653-1c6246e31d24, 'name': SearchDatastore_Task, 'duration_secs': 0.009709} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.214543] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.214543] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.214543] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.214543] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.214997] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.214997] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6175ca5b-c488-4f66-ae48-84029ea278ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.222635] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.222859] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 720.223586] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bf20335-0d88-47b8-85d9-00f3eff11c6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.228673] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 720.228673] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5211e62c-1191-b3eb-b83e-f259ac79ac4e" [ 720.228673] env[68674]: _type = "Task" [ 720.228673] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.236252] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5211e62c-1191-b3eb-b83e-f259ac79ac4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.279477] env[68674]: DEBUG oslo_vmware.api [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239805, 'name': PowerOnVM_Task, 'duration_secs': 0.543944} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.279820] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.280046] env[68674]: DEBUG nova.compute.manager [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.280826] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0820f290-374b-4129-b95e-a52d77445458 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.443326] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.443326] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.443941] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.443941] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.443941] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.444308] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.444308] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.444519] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 720.444760] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.444955] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.445149] env[68674]: DEBUG nova.virt.hardware [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.450856] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] 
[instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Reconfiguring VM instance instance-0000001f to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 720.451223] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04c66a86-8227-48ca-b661-f9de4b2aea4b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.473077] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 720.473077] env[68674]: value = "task-3239810" [ 720.473077] env[68674]: _type = "Task" [ 720.473077] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.480950] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239810, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.489255] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8741d3d1-1172-4195-9e6a-f23a65878e89 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.196s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.554400] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239808, 'name': ReconfigVM_Task, 'duration_secs': 0.269285} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.554722] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49/1b405b1f-ee1f-4e6e-9355-de8b5c26ab49.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.555346] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8eeacad-581f-4d78-9965-1600794d10ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.561146] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 720.561146] env[68674]: value = "task-3239811" [ 720.561146] env[68674]: _type = "Task" [ 720.561146] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.568977] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239811, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.668947] env[68674]: DEBUG nova.network.neutron [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.672168] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0e3c27fe-a2d9-45dc-9559-a678f90a6fef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 720.740736] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5211e62c-1191-b3eb-b83e-f259ac79ac4e, 'name': SearchDatastore_Task, 'duration_secs': 0.008698} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.741491] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d15ce89-56cc-49da-9d05-1c9202d47fb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.746972] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 720.746972] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524991d3-edb7-74a0-7c16-f8549c65411a" [ 720.746972] env[68674]: _type = "Task" [ 720.746972] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.757269] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524991d3-edb7-74a0-7c16-f8549c65411a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.799176] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.860891] env[68674]: DEBUG nova.network.neutron [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Updating instance_info_cache with network_info: [{"id": "1dd86562-12e7-45c7-9bc1-e9b4ed28e43e", "address": "fa:16:3e:86:f7:9e", "network": {"id": "27a717fb-73c6-4d73-a43c-a34652cafce4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-133933643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c2671e9238b4d00b011b7e521ca73bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd86562-12", "ovs_interfaceid": "1dd86562-12e7-45c7-9bc1-e9b4ed28e43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.984591] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239810, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.991118] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 721.071365] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239811, 'name': Rename_Task, 'duration_secs': 0.129826} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.071609] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.071853] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c389dc65-7ccf-4531-b10b-9adebf66284d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.080320] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 721.080320] env[68674]: value = "task-3239812" [ 721.080320] env[68674]: _type = "Task" [ 721.080320] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.090725] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.175634] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5e3f667c-5d3a-4465-9186-779563087480 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.260227] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524991d3-edb7-74a0-7c16-f8549c65411a, 'name': SearchDatastore_Task, 'duration_secs': 0.009863} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.260486] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.260748] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 721.261021] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c75db2cc-724e-47c9-afae-dd546212a7fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.273373] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 721.273373] env[68674]: value = "task-3239813" [ 721.273373] env[68674]: _type = "Task" [ 721.273373] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.363564] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Releasing lock "refresh_cache-3c8459db-cc54-4644-8e4c-83c87017a186" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.363967] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Instance network_info: |[{"id": "1dd86562-12e7-45c7-9bc1-e9b4ed28e43e", "address": "fa:16:3e:86:f7:9e", "network": {"id": "27a717fb-73c6-4d73-a43c-a34652cafce4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-133933643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c2671e9238b4d00b011b7e521ca73bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd86562-12", "ovs_interfaceid": "1dd86562-12e7-45c7-9bc1-e9b4ed28e43e", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 721.364460] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:f7:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dd86562-12e7-45c7-9bc1-e9b4ed28e43e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 721.372534] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Creating folder: Project (1c2671e9238b4d00b011b7e521ca73bb). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 721.372799] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db78b4c7-7d0b-4d8e-99ac-ef53512a9a3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.382937] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Created folder: Project (1c2671e9238b4d00b011b7e521ca73bb) in parent group-v647377. [ 721.383174] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Creating folder: Instances. Parent ref: group-v647505. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 721.383415] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c8fd06a-4404-48b1-9655-b146a27834bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.392992] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Created folder: Instances in parent group-v647505. [ 721.393506] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.393754] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 721.393991] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b4908dc-0992-45e7-a49c-bd4d759a51c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.415590] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 721.415590] env[68674]: value = "task-3239816" [ 721.415590] env[68674]: _type = "Task" [ 721.415590] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.424507] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239816, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.484938] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239810, 'name': ReconfigVM_Task, 'duration_secs': 0.782139} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.485380] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Reconfigured VM instance instance-0000001f to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 721.486832] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad89121-e9d2-4054-bd5d-7c460bb534f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.514365] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e/77fa5a89-961b-4c84-a75e-a5be0253677e.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 721.517198] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-533fa115-329c-4b70-9be9-b9929f355b8d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.541773] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 721.541773] env[68674]: value = "task-3239817" [ 721.541773] env[68674]: _type = "Task" [ 721.541773] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.548588] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.552398] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239817, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.594518] env[68674]: DEBUG oslo_vmware.api [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239812, 'name': PowerOnVM_Task, 'duration_secs': 0.511313} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.594782] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 721.594960] env[68674]: INFO nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Took 7.86 seconds to spawn the instance on the hypervisor. [ 721.595196] env[68674]: DEBUG nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 721.596252] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba6d318-caf2-4d34-aadc-bf2a57f7f2c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.680511] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 2007222e-e4e5-44b3-bd9e-55b4a2143c3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.686892] env[68674]: DEBUG nova.compute.manager [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Received event network-changed-1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.687116] env[68674]: DEBUG nova.compute.manager [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Refreshing instance network info cache due to event network-changed-1dd86562-12e7-45c7-9bc1-e9b4ed28e43e. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 721.687696] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Acquiring lock "refresh_cache-3c8459db-cc54-4644-8e4c-83c87017a186" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.687696] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Acquired lock "refresh_cache-3c8459db-cc54-4644-8e4c-83c87017a186" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.687696] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Refreshing network info cache for port 1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.789459] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514748} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.789459] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.789459] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.789459] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b9cf248-7111-4e88-b1ac-2bead2ff780f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.799069] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 721.799069] env[68674]: value = "task-3239818" [ 721.799069] env[68674]: _type = "Task" [ 721.799069] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.809167] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.930886] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239816, 'name': CreateVM_Task, 'duration_secs': 0.494711} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.931035] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 721.931958] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.932232] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.932671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 721.933020] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31e2dbdd-fd39-4d67-b285-01dd8c346e9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.939024] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 721.939024] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52727377-662c-7748-438d-b7b0eb790e55" [ 721.939024] env[68674]: _type = "Task" [ 721.939024] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.951251] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52727377-662c-7748-438d-b7b0eb790e55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.051348] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239817, 'name': ReconfigVM_Task, 'duration_secs': 0.419013} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.051631] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e/77fa5a89-961b-4c84-a75e-a5be0253677e.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.051892] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 722.115995] env[68674]: INFO nova.compute.manager [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Took 51.71 seconds to build instance. [ 722.190277] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 714142ec-89ad-44ab-8543-11493172a50b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 722.190277] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration 21a51122-368f-4c4d-9f78-bddd3b48ff9c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 722.190277] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 77fa5a89-961b-4c84-a75e-a5be0253677e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 722.307616] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069692} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.307921] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.308756] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7d6b7b-927d-4ddc-9b56-975a41a8c562 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.339352] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.342297] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-033fa255-46d3-411e-9b9d-904986e67dd9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.361166] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 722.361478] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647504', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'name': 'volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fa89e0b5-590d-43fb-bb11-02f8fdee0c2f', 'attached_at': '', 'detached_at': '', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'serial': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 722.362419] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9df4f8-0d04-47b6-ba54-abc1f91d5c65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.366480] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 722.366480] env[68674]: value = "task-3239819" [ 722.366480] env[68674]: _type = "Task" [ 722.366480] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.382629] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b2ca79-86bf-499e-b818-d1912bca2db2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.388743] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.412546] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337/volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.415396] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-405b41aa-e4f1-4842-bb62-4799a04f690c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.433998] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 722.433998] env[68674]: value = "task-3239820" [ 722.433998] env[68674]: _type = "Task" [ 722.433998] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.443129] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.451943] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52727377-662c-7748-438d-b7b0eb790e55, 'name': SearchDatastore_Task, 'duration_secs': 0.010422} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.452341] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.452587] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 722.456855] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.456855] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.456855] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 722.456855] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2afc37b-4d65-462d-942b-d798084c181c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.463647] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 722.463906] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 722.467374] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0da38dac-065e-4869-aef8-823b87055db4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.472883] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 722.472883] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529dd6d0-25f7-dc4a-b824-55ec3995dbe7" [ 722.472883] env[68674]: _type = "Task" [ 722.472883] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.482703] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529dd6d0-25f7-dc4a-b824-55ec3995dbe7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.554576] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Updated VIF entry in instance network info cache for port 1dd86562-12e7-45c7-9bc1-e9b4ed28e43e. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 722.554852] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Updating instance_info_cache with network_info: [{"id": "1dd86562-12e7-45c7-9bc1-e9b4ed28e43e", "address": "fa:16:3e:86:f7:9e", "network": {"id": "27a717fb-73c6-4d73-a43c-a34652cafce4", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-133933643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c2671e9238b4d00b011b7e521ca73bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd86562-12", "ovs_interfaceid": "1dd86562-12e7-45c7-9bc1-e9b4ed28e43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.559963] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b5c2c2-ac3d-4751-b8d2-b0d2b6a2df46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.586206] env[68674]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0de9c85-58ac-4a22-ad04-81b4bef1a84c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.605977] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 722.619664] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0ea25eb7-c7c4-42dd-bdd2-20bd7f3efd94 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.838s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.688830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "505b0352-39ab-4841-8766-14626af2b13e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.689038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "505b0352-39ab-4841-8766-14626af2b13e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.689365] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "505b0352-39ab-4841-8766-14626af2b13e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.689581] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "505b0352-39ab-4841-8766-14626af2b13e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.689752] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "505b0352-39ab-4841-8766-14626af2b13e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.692660] env[68674]: INFO nova.compute.manager [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 
tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Terminating instance [ 722.694264] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 55727bbc-6b65-4e4c-ba4f-8240efbf052a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 722.878641] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.944728] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.986024] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529dd6d0-25f7-dc4a-b824-55ec3995dbe7, 'name': SearchDatastore_Task, 'duration_secs': 0.013801} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.986024] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-591b48c0-76a3-43a9-93ce-825139153a13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.988670] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 722.988670] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5232ec37-eee6-79e7-5e22-1c7046106f30" [ 722.988670] env[68674]: _type = "Task" [ 722.988670] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.996616] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5232ec37-eee6-79e7-5e22-1c7046106f30, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.061658] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Releasing lock "refresh_cache-3c8459db-cc54-4644-8e4c-83c87017a186" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.061658] env[68674]: DEBUG nova.compute.manager [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Received event network-changed-328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 723.061658] env[68674]: DEBUG nova.compute.manager [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Refreshing instance network info cache due to event network-changed-328800af-995b-4980-a8a3-4a51aa3c17e5. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 723.061658] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Acquiring lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.061658] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Acquired lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.061947] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Refreshing network info cache for port 328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.122028] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 723.129779] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6429b3-0f14-4b8a-8685-7b8631dbd0f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.136977] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Suspending the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 723.137782] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3f8a7bd8-50df-40f3-9f4f-098e5214f525 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.143691] env[68674]: DEBUG oslo_vmware.api [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] Waiting for the task: (returnval){ [ 723.143691] env[68674]: value = "task-3239821" [ 723.143691] env[68674]: _type = "Task" [ 723.143691] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.153513] env[68674]: DEBUG oslo_vmware.api [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] Task: {'id': task-3239821, 'name': SuspendVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.175697] env[68674]: DEBUG nova.network.neutron [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Port b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 723.197606] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f029042f-d80b-453e-adc9-1e65d7da7aaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 723.202228] env[68674]: DEBUG nova.compute.manager [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 723.202228] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.202228] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb63fabb-b741-44d3-8480-6ac7d225baf1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.211479] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 723.211479] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7533d9b-495d-47fe-86e2-cf0f7148c8b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.217917] env[68674]: DEBUG oslo_vmware.api [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 723.217917] env[68674]: value = "task-3239822" [ 723.217917] env[68674]: _type = "Task" [ 723.217917] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.229069] env[68674]: DEBUG oslo_vmware.api [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.378809] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239819, 'name': ReconfigVM_Task, 'duration_secs': 0.613134} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.379201] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Reconfigured VM instance instance-00000028 to attach disk [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86/d88ccf9b-7432-4be0-82f7-b2a9155f7d86.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.379889] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e84b7b3-c117-4ba6-977f-a68eb0cc541c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.387047] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 723.387047] env[68674]: value = "task-3239823" [ 723.387047] env[68674]: _type = "Task" [ 723.387047] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.398981] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239823, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.444949] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239820, 'name': ReconfigVM_Task, 'duration_secs': 0.848344} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.445374] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Reconfigured VM instance instance-00000015 to attach disk [datastore2] volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337/volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.451305] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-319b40cc-1a2e-4340-8878-d832bc690737 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.470681] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 723.470681] env[68674]: value = "task-3239824" [ 723.470681] env[68674]: _type = "Task" [ 723.470681] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.482592] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239824, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.498700] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5232ec37-eee6-79e7-5e22-1c7046106f30, 'name': SearchDatastore_Task, 'duration_secs': 0.03682} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.498978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.499263] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3c8459db-cc54-4644-8e4c-83c87017a186/3c8459db-cc54-4644-8e4c-83c87017a186.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.499542] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72289c36-b277-45d2-b0ac-6c1241bce408 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.505974] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 723.505974] env[68674]: value = "task-3239825" [ 723.505974] env[68674]: _type = "Task" [ 723.505974] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.514058] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239825, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.649526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.654992] env[68674]: DEBUG oslo_vmware.api [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] Task: {'id': task-3239821, 'name': SuspendVM_Task} progress is 62%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.700204] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5c12cb5d-821c-4e63-86a0-dadc9794a8ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 723.728746] env[68674]: DEBUG oslo_vmware.api [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239822, 'name': PowerOffVM_Task, 'duration_secs': 0.316693} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.728932] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 723.729131] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 723.729385] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26cd7a9b-3602-4b4c-af67-6dede05b281e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.800192] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 723.802740] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 723.802740] env[68674]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleting the datastore file [datastore1] 505b0352-39ab-4841-8766-14626af2b13e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.802740] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46ea7e1b-7ed1-4f96-9968-2bfd84c4a0f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.809841] env[68674]: DEBUG oslo_vmware.api [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 723.809841] env[68674]: value = "task-3239827" [ 723.809841] env[68674]: _type = "Task" [ 723.809841] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.821680] env[68674]: DEBUG oslo_vmware.api [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.866202] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updated VIF entry in instance network info cache for port 328800af-995b-4980-a8a3-4a51aa3c17e5. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.866202] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updating instance_info_cache with network_info: [{"id": "328800af-995b-4980-a8a3-4a51aa3c17e5", "address": "fa:16:3e:6a:2a:04", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328800af-99", "ovs_interfaceid": "328800af-995b-4980-a8a3-4a51aa3c17e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.905031] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': 
task-3239823, 'name': Rename_Task, 'duration_secs': 0.24844} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.905031] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.905031] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3daf31bd-86fa-4fbe-9fb0-cbf5c0840e7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.913283] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 723.913283] env[68674]: value = "task-3239828" [ 723.913283] env[68674]: _type = "Task" [ 723.913283] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.924270] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239828, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.986045] env[68674]: DEBUG oslo_vmware.api [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239824, 'name': ReconfigVM_Task, 'duration_secs': 0.160552} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.986325] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647504', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'name': 'volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fa89e0b5-590d-43fb-bb11-02f8fdee0c2f', 'attached_at': '', 'detached_at': '', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'serial': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 724.017067] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502132} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.017430] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3c8459db-cc54-4644-8e4c-83c87017a186/3c8459db-cc54-4644-8e4c-83c87017a186.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.017700] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.018153] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1cf9451e-2598-4f60-9922-5affd48d2d60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.031903] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 724.031903] env[68674]: value = "task-3239829" [ 724.031903] env[68674]: _type = "Task" [ 724.031903] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.044824] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239829, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.155490] env[68674]: DEBUG oslo_vmware.api [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] Task: {'id': task-3239821, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.197925] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.198862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.198862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.202955] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f69c5fcf-6d25-48a5-a154-c3632c76175a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 724.321405] env[68674]: DEBUG oslo_vmware.api [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276587} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.321668] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.321853] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 724.322041] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.322313] env[68674]: INFO nova.compute.manager [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 724.322558] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.322744] env[68674]: DEBUG nova.compute.manager [-] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 724.322838] env[68674]: DEBUG nova.network.neutron [-] [instance: 505b0352-39ab-4841-8766-14626af2b13e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 724.368903] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Releasing lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.369254] env[68674]: DEBUG nova.compute.manager [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Received event network-changed-328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.369434] env[68674]: DEBUG nova.compute.manager [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Refreshing instance network info cache due to event network-changed-328800af-995b-4980-a8a3-4a51aa3c17e5. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 724.369646] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Acquiring lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.369788] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Acquired lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.369948] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Refreshing network info cache for port 328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.423944] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239828, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.474348] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "7329e503-d87d-4e15-b181-65ac6e376781" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.474592] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.539693] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239829, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081253} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.539959] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.541921] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f78ee2-4154-485e-9784-72195510ee3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.572574] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 3c8459db-cc54-4644-8e4c-83c87017a186/3c8459db-cc54-4644-8e4c-83c87017a186.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.572574] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c13fd1b-a6e1-45ae-a7ff-e2a48832837a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.599809] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 724.599809] env[68674]: value = "task-3239830" [ 724.599809] env[68674]: _type = "Task" [ 724.599809] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.613150] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239830, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.656204] env[68674]: DEBUG oslo_vmware.api [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] Task: {'id': task-3239821, 'name': SuspendVM_Task, 'duration_secs': 1.077998} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.656536] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Suspended the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 724.656668] env[68674]: DEBUG nova.compute.manager [None req-cd3faa0b-c5c7-4f08-afac-89934ab008a8 tempest-ServersAdminNegativeTestJSON-1421585280 tempest-ServersAdminNegativeTestJSON-1421585280-project-admin] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.657441] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3380546-da63-4fc8-b686-9fa50bf49374 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.706807] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 724.929497] env[68674]: DEBUG oslo_vmware.api [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239828, 'name': PowerOnVM_Task, 'duration_secs': 0.797337} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.929807] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.930043] env[68674]: DEBUG nova.compute.manager [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.930875] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea93f7c-09d9-4f14-90af-8204afd84ac8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.943574] env[68674]: DEBUG nova.compute.manager [req-12aefa08-27e2-4a91-84fc-b48572def47b req-2083f4f7-074c-471d-ab19-ddf0cd7cd73f service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Received event network-vif-deleted-070f055c-efb5-4c7e-ba62-e44b000f2eeb {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.943782] env[68674]: INFO nova.compute.manager [req-12aefa08-27e2-4a91-84fc-b48572def47b req-2083f4f7-074c-471d-ab19-ddf0cd7cd73f service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Neutron deleted interface 070f055c-efb5-4c7e-ba62-e44b000f2eeb; detaching it from the instance and deleting it from the info cache [ 724.943953] env[68674]: DEBUG nova.network.neutron [req-12aefa08-27e2-4a91-84fc-b48572def47b req-2083f4f7-074c-471d-ab19-ddf0cd7cd73f service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.031464] env[68674]: DEBUG nova.objects.instance [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lazy-loading 'flavor' on Instance uuid fa89e0b5-590d-43fb-bb11-02f8fdee0c2f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.109256] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239830, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.142141] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updated VIF entry in instance network info cache for port 328800af-995b-4980-a8a3-4a51aa3c17e5. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.142141] env[68674]: DEBUG nova.network.neutron [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updating instance_info_cache with network_info: [{"id": "328800af-995b-4980-a8a3-4a51aa3c17e5", "address": "fa:16:3e:6a:2a:04", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328800af-99", "ovs_interfaceid": "328800af-995b-4980-a8a3-4a51aa3c17e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.212467] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance c4fd04a7-2b11-4c4b-84d1-53edc1e3f035 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 725.212467] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 725.212467] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 725.276062] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.276062] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.276062] env[68674]: DEBUG nova.network.neutron [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.377772] env[68674]: DEBUG nova.network.neutron [-] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.449050] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8d641be-66a2-4c7d-87d4-7b59e6d62381 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.455813] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.462703] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d95234-b994-42b0-8be4-eddecfec263c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.504130] env[68674]: DEBUG nova.compute.manager [req-12aefa08-27e2-4a91-84fc-b48572def47b req-2083f4f7-074c-471d-ab19-ddf0cd7cd73f service nova] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Detach interface failed, port_id=070f055c-efb5-4c7e-ba62-e44b000f2eeb, reason: Instance 505b0352-39ab-4841-8766-14626af2b13e could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 725.540435] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f102fd01-6bbc-41b6-8941-3aa201772e62 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.801s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.591903] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.592277] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.609549] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239830, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.646345] env[68674]: DEBUG oslo_concurrency.lockutils [req-7c1c2534-3bd2-4ba6-a4fe-17c73a2750be req-afdd2550-86fb-4a87-8f6f-73a6f4c1a6f8 service nova] Releasing lock "refresh_cache-3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.838649] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568f58c0-411a-495b-aa05-e64e0a286622 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.848276] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e74188-a3c2-4226-8801-a33b87d5d2fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.879496] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed714624-fbc1-4934-8282-8e537943a9f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.882551] env[68674]: INFO nova.compute.manager [-] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Took 1.56 seconds to deallocate network for instance. 
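The records above for instance 505b0352-39ab-4841-8766-14626af2b13e show the full vmwareapi destroy sequence: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on the instance directory, and finally network deallocation through Neutron. The sketch below is only an orientation aid for reading those records; power_off, unregister, delete_datastore_dir and deallocate_network are hypothetical stand-ins for the vm_util/vmops/neutron helpers named in the log, not the actual Nova functions.

import logging
import time

LOG = logging.getLogger(__name__)

def destroy_instance(instance_uuid, power_off, unregister,
                     delete_datastore_dir, deallocate_network):
    # Same ordering as the log above: power off first, then unregister the VM,
    # then remove its files from the datastore, then release the Neutron ports.
    started = time.monotonic()
    power_off(instance_uuid)
    unregister(instance_uuid)
    delete_datastore_dir(instance_uuid)
    LOG.info("Took %.2f seconds to destroy the instance on the hypervisor.",
             time.monotonic() - started)
    deallocate_network(instance_uuid)

In the log each of those steps is itself an asynchronous vCenter task that the caller polls to completion; a sketch of that polling loop follows the last record of this section.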
[ 725.891385] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb059782-61fe-4fd1-a849-fae1bf86d631 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.906352] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.071465] env[68674]: DEBUG nova.network.neutron [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance_info_cache with network_info: [{"id": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "address": "fa:16:3e:dc:4d:99", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a3099e-55", "ovs_interfaceid": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.099855] env[68674]: INFO nova.compute.manager [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Detaching volume 600a9d1f-0ffd-4d51-89b0-9bdf3c055337 [ 726.113177] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239830, 'name': ReconfigVM_Task, 'duration_secs': 1.021796} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.113501] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 3c8459db-cc54-4644-8e4c-83c87017a186/3c8459db-cc54-4644-8e4c-83c87017a186.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 726.117249] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ef62626-5e38-4ca9-b3a3-12ba93251b3c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.122160] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 726.122160] env[68674]: value = "task-3239831" [ 726.122160] env[68674]: _type = "Task" [ 726.122160] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.130558] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239831, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.149502] env[68674]: INFO nova.virt.block_device [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Attempting to driver detach volume 600a9d1f-0ffd-4d51-89b0-9bdf3c055337 from mountpoint /dev/sdb [ 726.149749] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 726.149943] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647504', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'name': 'volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fa89e0b5-590d-43fb-bb11-02f8fdee0c2f', 'attached_at': '', 'detached_at': '', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'serial': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 726.150920] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025fbadd-ea36-4d14-b35e-790afce96630 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.173077] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b0da58-d5e7-4265-b338-d5c3a8e5c870 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.179987] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092e905d-7edc-4949-be3b-963a81a1728c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.201315] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dee87a0-483f-4407-bb39-bf90ff84cde5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.216849] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] The volume has not been displaced from its original location: [datastore2] volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337/volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 726.223441] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Reconfiguring VM instance instance-00000015 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 726.223441] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27101257-8550-4651-af35-d9e088ae6c73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.240993] env[68674]: DEBUG oslo_vmware.api [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 726.240993] env[68674]: value = "task-3239832" [ 726.240993] env[68674]: _type = "Task" [ 726.240993] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.252482] env[68674]: DEBUG oslo_vmware.api [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239832, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.286224] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.289928] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.289928] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.289928] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.289928] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.290360] env[68674]: INFO nova.compute.manager [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Terminating instance [ 726.393023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.412273] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.573832] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.632159] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239831, 'name': Rename_Task, 'duration_secs': 0.149591} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.632477] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.632614] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce825a0c-38dd-4220-b93d-3f898cde9694 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.638407] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 726.638407] env[68674]: value = "task-3239833" [ 726.638407] env[68674]: _type = "Task" [ 726.638407] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.646078] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239833, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.751012] env[68674]: DEBUG oslo_vmware.api [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239832, 'name': ReconfigVM_Task, 'duration_secs': 0.253233} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.751419] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Reconfigured VM instance instance-00000015 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 726.756118] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcd127d5-2874-4483-b878-bb5b73c4cc57 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.772719] env[68674]: DEBUG oslo_vmware.api [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 726.772719] env[68674]: value = "task-3239834" [ 726.772719] env[68674]: _type = "Task" [ 726.772719] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.781326] env[68674]: DEBUG oslo_vmware.api [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239834, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.793298] env[68674]: DEBUG nova.compute.manager [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 726.793572] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.794450] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d19a59-caee-41e2-82fb-2b519f1d11bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.801913] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 726.802207] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-857c5c6a-3c08-4b1d-87b5-e37a9165e002 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.808019] env[68674]: DEBUG oslo_vmware.api [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 726.808019] env[68674]: value = "task-3239835" [ 726.808019] env[68674]: _type = "Task" [ 726.808019] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.822962] env[68674]: DEBUG oslo_vmware.api [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239835, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.838039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.838182] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.916485] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 726.916844] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.830s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.917206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.981s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.917492] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.920351] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.531s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.922091] env[68674]: INFO nova.compute.claims [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.948349] env[68674]: INFO nova.scheduler.client.report [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleted allocations for instance 
160d9aa2-048d-45a2-ab55-581c8721ac3b [ 727.099047] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a3e121-107f-4071-866f-61081884e8ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.130656] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abc4035-bd20-4e15-8976-2514a3a6eb8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.140412] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 727.156606] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239833, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.285023] env[68674]: DEBUG oslo_vmware.api [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3239834, 'name': ReconfigVM_Task, 'duration_secs': 0.154706} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.285437] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647504', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'name': 'volume-600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fa89e0b5-590d-43fb-bb11-02f8fdee0c2f', 'attached_at': '', 'detached_at': '', 'volume_id': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337', 'serial': '600a9d1f-0ffd-4d51-89b0-9bdf3c055337'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 727.322132] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.322553] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.322852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.323174] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.323466] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.325785] env[68674]: DEBUG oslo_vmware.api [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239835, 'name': PowerOffVM_Task, 'duration_secs': 0.189426} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.326214] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 727.326525] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 727.326917] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-017d86ed-1d7d-4d4d-8efc-cdf175800bae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.329843] env[68674]: INFO nova.compute.manager [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Terminating instance [ 727.402507] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 727.402698] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 727.403713] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] d88ccf9b-7432-4be0-82f7-b2a9155f7d86 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 727.403713] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57a8d4a2-ed52-460a-9766-44463893aca0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.410309] env[68674]: DEBUG oslo_vmware.api [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 727.410309] env[68674]: value = "task-3239837" [ 727.410309] env[68674]: _type = "Task" [ 727.410309] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.418159] env[68674]: DEBUG oslo_vmware.api [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239837, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.458366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06a795e5-d487-4b51-a29a-d034706d72f6 tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "160d9aa2-048d-45a2-ab55-581c8721ac3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.986s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.649979] env[68674]: DEBUG oslo_vmware.api [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239833, 'name': PowerOnVM_Task, 'duration_secs': 0.520065} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.651772] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.652088] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.652315] env[68674]: INFO nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Took 9.03 seconds to spawn the instance on the hypervisor. [ 727.652505] env[68674]: DEBUG nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.652760] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfd81a78-d0ba-45de-85d9-1b6f5b384b6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.655112] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9e3e62-886c-4bc0-8388-2ada8a42308d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.664990] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 727.664990] env[68674]: value = "task-3239838" [ 727.664990] env[68674]: _type = "Task" [ 727.664990] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.678341] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.835472] env[68674]: DEBUG nova.compute.manager [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 727.835801] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.837117] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfc7270-216f-49e5-9d63-9a709aecdae2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.846669] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 727.848526] env[68674]: DEBUG nova.objects.instance [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lazy-loading 'flavor' on Instance uuid fa89e0b5-590d-43fb-bb11-02f8fdee0c2f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.850294] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8191ac67-e7e3-4d88-a52b-ce583ba54f5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.915759] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 727.915994] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 727.916170] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Deleting the datastore file [datastore2] 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49 {{(pid=68674) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 727.916820] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b76bf08-8c25-4611-b1cf-a45571bcfd39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.928468] env[68674]: DEBUG oslo_vmware.api [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3239837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253311} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.930121] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 727.930322] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 727.930531] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.930708] env[68674]: INFO nova.compute.manager [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Took 1.14 seconds to destroy the instance on the hypervisor. [ 727.930958] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.933955] env[68674]: DEBUG oslo_vmware.api [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 727.933955] env[68674]: value = "task-3239840" [ 727.933955] env[68674]: _type = "Task" [ 727.933955] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.934358] env[68674]: DEBUG nova.compute.manager [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 727.934497] env[68674]: DEBUG nova.network.neutron [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.944591] env[68674]: DEBUG oslo_vmware.api [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.185981] env[68674]: INFO nova.compute.manager [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Took 54.73 seconds to build instance. [ 728.192315] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239838, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.272918] env[68674]: DEBUG nova.compute.manager [req-b8f38e46-b8ad-4676-b1ca-11a624e84943 req-521e97cb-8a09-4f74-83d9-88738e1c2011 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Received event network-vif-deleted-61791433-aab7-4244-91a2-6caef49a0717 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 728.272918] env[68674]: INFO nova.compute.manager [req-b8f38e46-b8ad-4676-b1ca-11a624e84943 req-521e97cb-8a09-4f74-83d9-88738e1c2011 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Neutron deleted interface 61791433-aab7-4244-91a2-6caef49a0717; detaching it from the instance and deleting it from the info cache [ 728.272918] env[68674]: DEBUG nova.network.neutron [req-b8f38e46-b8ad-4676-b1ca-11a624e84943 req-521e97cb-8a09-4f74-83d9-88738e1c2011 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.293025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.293229] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.293446] env[68674]: DEBUG 
oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock "e75d2bc7-f356-4443-9641-d9ebf35843cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.293624] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.293899] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.296543] env[68674]: INFO nova.compute.manager [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Terminating instance [ 728.445568] env[68674]: DEBUG oslo_vmware.api [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176056} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.445822] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 728.446010] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 728.446200] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.446371] env[68674]: INFO nova.compute.manager [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 728.446631] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.449062] env[68674]: DEBUG nova.compute.manager [-] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 728.449161] env[68674]: DEBUG nova.network.neutron [-] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.552950] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2579dfa6-6ad9-4d11-94d8-42e41d0ca8b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.562058] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200192c5-78f1-42f9-ab23-b4acad74bd26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.594911] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534340d0-5907-4088-9615-073379904fb0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.602219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d65f059-91b8-4d3a-9b04-a4ba0be86991 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.616735] env[68674]: DEBUG nova.compute.provider_tree [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.680650] env[68674]: DEBUG oslo_vmware.api [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3239838, 'name': PowerOnVM_Task, 'duration_secs': 0.591003} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.680650] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.680835] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd787f3-ecf2-4c0a-bbb8-448bd65bf3f9 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance '77fa5a89-961b-4c84-a75e-a5be0253677e' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 728.692561] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5cce01a0-6704-49fc-8cfb-7b1a0b8a1607 tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "3c8459db-cc54-4644-8e4c-83c87017a186" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.816s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.746345] env[68674]: DEBUG nova.network.neutron [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.776671] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b16cb8e7-cba3-44bc-af85-77305f3c5034 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.786559] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe47660a-36b2-4c56-97bb-98690104ffed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.800107] env[68674]: DEBUG nova.compute.manager [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 728.800306] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.801105] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e486e5a-666b-4e90-85b2-054e7a2fb053 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.808241] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.808419] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7108b13e-075a-43f4-8182-4148a098cf34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.822017] env[68674]: DEBUG nova.compute.manager [req-b8f38e46-b8ad-4676-b1ca-11a624e84943 req-521e97cb-8a09-4f74-83d9-88738e1c2011 service nova] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Detach interface failed, port_id=61791433-aab7-4244-91a2-6caef49a0717, reason: Instance d88ccf9b-7432-4be0-82f7-b2a9155f7d86 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 728.827280] env[68674]: DEBUG oslo_vmware.api [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 728.827280] env[68674]: value = "task-3239841" [ 728.827280] env[68674]: _type = "Task" [ 728.827280] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.835398] env[68674]: DEBUG oslo_vmware.api [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239841, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.858967] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cf815e99-84d7-4d65-aaad-e084b5ab14db tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.266s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.119799] env[68674]: DEBUG nova.scheduler.client.report [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.195229] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 729.223397] env[68674]: DEBUG nova.network.neutron [-] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.249676] env[68674]: INFO nova.compute.manager [-] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Took 1.32 seconds to deallocate network for instance. [ 729.337972] env[68674]: DEBUG oslo_vmware.api [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239841, 'name': PowerOffVM_Task, 'duration_secs': 0.326779} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.338145] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.338315] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.338799] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbaeab39-ec40-4731-8e64-6e9108e32328 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.435719] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.435952] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.436149] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleting the datastore file [datastore2] e75d2bc7-f356-4443-9641-d9ebf35843cd {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.436415] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8db0ad4-fc2e-483d-96f0-e0db97b3b026 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.442839] env[68674]: DEBUG oslo_vmware.api [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for the task: (returnval){ [ 729.442839] env[68674]: value = "task-3239843" [ 729.442839] env[68674]: _type = "Task" [ 729.442839] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.450937] env[68674]: DEBUG oslo_vmware.api [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239843, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.509147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "interface-3c8459db-cc54-4644-8e4c-83c87017a186-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.509438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "interface-3c8459db-cc54-4644-8e4c-83c87017a186-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.509770] env[68674]: DEBUG nova.objects.instance [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lazy-loading 'flavor' on Instance uuid 3c8459db-cc54-4644-8e4c-83c87017a186 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 729.540944] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.540944] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.625900] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.626447] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 729.629152] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.154s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.630595] env[68674]: INFO nova.compute.claims [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.718022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.725589] env[68674]: INFO nova.compute.manager [-] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Took 1.28 seconds to deallocate network for instance. [ 729.755972] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.953349] env[68674]: DEBUG oslo_vmware.api [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Task: {'id': task-3239843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151247} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.953621] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.953812] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.953987] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.954181] env[68674]: INFO nova.compute.manager [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 729.954429] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.954693] env[68674]: DEBUG nova.compute.manager [-] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.954804] env[68674]: DEBUG nova.network.neutron [-] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.014811] env[68674]: DEBUG nova.objects.instance [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lazy-loading 'pci_requests' on Instance uuid 3c8459db-cc54-4644-8e4c-83c87017a186 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.140998] env[68674]: DEBUG nova.compute.utils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 730.142267] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 730.142267] env[68674]: DEBUG nova.network.neutron [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.219719] env[68674]: DEBUG nova.policy [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6db47ba02974c488080c4e454ed045c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b43bb0fe5b1c4383b5089eb59db781ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.231828] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.519761] env[68674]: DEBUG nova.objects.base [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Object Instance<3c8459db-cc54-4644-8e4c-83c87017a186> lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 730.519985] env[68674]: DEBUG nova.network.neutron [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.549875] env[68674]: DEBUG nova.compute.manager [req-ca4e108d-96cb-4299-8198-83574c580f93 req-e268eb95-3d4c-4279-8633-f329aad13878 service nova] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Received event network-vif-deleted-1ac32e60-87ff-4d72-ad65-cd7f723a0bfa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.642083] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 730.675539] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2798bd1e-0e61-410b-8dbe-e2baf22308ab tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "interface-3c8459db-cc54-4644-8e4c-83c87017a186-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.164s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.754396] env[68674]: DEBUG nova.network.neutron [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Successfully created port: 6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.831377] env[68674]: DEBUG nova.network.neutron [-] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.250465] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3273dae7-375a-4627-b47d-87d2d51f393a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.258196] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5caee1-56b5-43e5-9e2c-2f7de0c3f4ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.292122] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece104bc-79ef-4ab0-8823-423c015cacce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.300869] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d7286f-4230-47c5-87e3-92aa62b89161 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.313350] env[68674]: DEBUG nova.compute.provider_tree [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.333798] env[68674]: INFO nova.compute.manager [-] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Took 1.38 seconds to deallocate network for instance. [ 731.655491] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 731.668321] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "77fa5a89-961b-4c84-a75e-a5be0253677e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.668628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.668906] env[68674]: DEBUG nova.compute.manager [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Going to confirm migration 1 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 731.681142] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 731.681428] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.681588] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 731.681820] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.681985] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 731.682154] 
env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 731.682396] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 731.682560] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 731.682729] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 731.682892] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 731.683097] env[68674]: DEBUG nova.virt.hardware [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 731.686895] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2488e1-e2ba-48e6-81cb-2c1f7923a2ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.696889] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e4dafe-289d-4482-a6a3-43cceea8c08c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.816297] env[68674]: DEBUG nova.scheduler.client.report [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.841511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.321757] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.322119] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 732.326198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.326198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.326531] env[68674]: DEBUG nova.network.neutron [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.326531] env[68674]: DEBUG nova.objects.instance [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lazy-loading 'info_cache' on Instance uuid 77fa5a89-961b-4c84-a75e-a5be0253677e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 732.331019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.747s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.331019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.331019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.719s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.331019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.332456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.385s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.334431] env[68674]: INFO nova.compute.claims [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.375050] env[68674]: INFO nova.scheduler.client.report [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleted allocations for instance 275cdfcc-06f0-4c29-b18b-55cde38480a3 [ 732.380876] env[68674]: INFO nova.scheduler.client.report [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Deleted allocations for instance b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14 [ 732.492310] env[68674]: DEBUG nova.network.neutron [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Successfully updated port: 6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.577802] env[68674]: DEBUG nova.compute.manager [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Received event network-vif-deleted-81afa256-db6b-44e2-944a-7654579b8b50 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.577994] env[68674]: DEBUG nova.compute.manager [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Received event network-vif-plugged-6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.578234] env[68674]: DEBUG oslo_concurrency.lockutils [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] Acquiring lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.578394] env[68674]: DEBUG oslo_concurrency.lockutils 
[req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.578558] env[68674]: DEBUG oslo_concurrency.lockutils [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.578720] env[68674]: DEBUG nova.compute.manager [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] No waiting events found dispatching network-vif-plugged-6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 732.578879] env[68674]: WARNING nova.compute.manager [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Received unexpected event network-vif-plugged-6ef03ee8-7859-4976-be77-54e193e997a1 for instance with vm_state building and task_state spawning. [ 732.579098] env[68674]: DEBUG nova.compute.manager [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Received event network-changed-6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.579259] env[68674]: DEBUG nova.compute.manager [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Refreshing instance network info cache due to event network-changed-6ef03ee8-7859-4976-be77-54e193e997a1. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 732.579457] env[68674]: DEBUG oslo_concurrency.lockutils [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] Acquiring lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.579595] env[68674]: DEBUG oslo_concurrency.lockutils [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] Acquired lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.579751] env[68674]: DEBUG nova.network.neutron [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Refreshing network info cache for port 6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 732.681732] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "3c8459db-cc54-4644-8e4c-83c87017a186" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.682364] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "3c8459db-cc54-4644-8e4c-83c87017a186" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.682628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "3c8459db-cc54-4644-8e4c-83c87017a186-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.682862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "3c8459db-cc54-4644-8e4c-83c87017a186-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.683074] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "3c8459db-cc54-4644-8e4c-83c87017a186-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.685286] env[68674]: INFO nova.compute.manager [None req-7ce387fe-db22-4619-9fd2-847beedf68df 
tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Terminating instance [ 732.842054] env[68674]: DEBUG nova.compute.utils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 732.845423] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 732.845605] env[68674]: DEBUG nova.network.neutron [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.884828] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b2ed5f58-10ae-4dc5-89b5-3c4043ddd941 tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "275cdfcc-06f0-4c29-b18b-55cde38480a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.131s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.890524] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faea448a-f1f7-4957-b360-c496baa5245f tempest-ListServersNegativeTestJSON-2025065306 tempest-ListServersNegativeTestJSON-2025065306-project-member] Lock "b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.228s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.899573] env[68674]: DEBUG nova.policy [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cf958d2c6c14ab99c90d41a6f300d7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c87ef02334014ade962842a0b5ff355d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 732.998084] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.129563] env[68674]: DEBUG nova.network.neutron [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.189073] env[68674]: DEBUG nova.compute.manager [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 733.189331] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.190265] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6238ef-c060-45e1-81f7-13bc2bf087c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.204957] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.205225] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f602a1fa-3130-4355-ac99-03cd5925d132 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.212098] env[68674]: DEBUG oslo_vmware.api [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 733.212098] env[68674]: value = "task-3239844" [ 733.212098] env[68674]: _type = "Task" [ 733.212098] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.221636] env[68674]: DEBUG oslo_vmware.api [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239844, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.258663] env[68674]: DEBUG nova.network.neutron [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.350706] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 733.356392] env[68674]: DEBUG nova.network.neutron [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Successfully created port: 6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.726372] env[68674]: DEBUG oslo_vmware.api [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239844, 'name': PowerOffVM_Task, 'duration_secs': 0.235358} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.726848] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 733.727115] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 733.727402] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8b70383-03a8-496f-9994-d6205b196305 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.761832] env[68674]: DEBUG oslo_concurrency.lockutils [req-2fbc7bb7-e975-4989-a445-087030f77e05 req-270a3fc9-1f8e-4cb0-bf0a-2dc3d9b89e24 service nova] Releasing lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.762230] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquired lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.762498] env[68674]: DEBUG nova.network.neutron [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.790887] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 733.790887] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Deleting contents of the VM 
from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 733.794124] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Deleting the datastore file [datastore2] 3c8459db-cc54-4644-8e4c-83c87017a186 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 733.794124] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edf07c71-014d-4c4f-960f-d8373d74e352 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.798223] env[68674]: DEBUG oslo_vmware.api [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for the task: (returnval){ [ 733.798223] env[68674]: value = "task-3239846" [ 733.798223] env[68674]: _type = "Task" [ 733.798223] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.808416] env[68674]: DEBUG oslo_vmware.api [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.994797] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc06ca6e-54a4-4063-85c5-0fdafc73bf2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.002174] env[68674]: DEBUG nova.network.neutron [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance_info_cache with network_info: [{"id": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "address": "fa:16:3e:dc:4d:99", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a3099e-55", "ovs_interfaceid": "b1a3099e-550f-4bc4-a4b5-1fe1e04ea342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.007339] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ec09f0-4cfa-414e-9150-7b5f99ce43de 
{{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.044476] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264b4b62-5831-4d46-9c63-f9703232b7ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.053950] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381b5664-3c20-4aab-a3bf-1e1cd1812bb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.067127] env[68674]: DEBUG nova.compute.provider_tree [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.293823] env[68674]: DEBUG nova.network.neutron [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.307911] env[68674]: DEBUG oslo_vmware.api [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Task: {'id': task-3239846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128229} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.307911] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 734.308228] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 734.308535] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.308744] env[68674]: INFO nova.compute.manager [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Took 1.12 seconds to destroy the instance on the hypervisor. [ 734.309082] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 734.309341] env[68674]: DEBUG nova.compute.manager [-] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 734.309483] env[68674]: DEBUG nova.network.neutron [-] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.370145] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 734.400287] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 734.400562] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.400721] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 734.400901] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.401065] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 734.401254] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 734.401472] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 734.401633] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 734.401805] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 734.402715] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 734.402715] env[68674]: DEBUG nova.virt.hardware [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 734.403050] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cc1a20-9b9e-4303-a8dd-713a245cc220 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.413020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960cf7ae-05cb-40ef-aa96-7e6d7f2476a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.469963] env[68674]: DEBUG nova.network.neutron [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Updating instance_info_cache with network_info: [{"id": "6ef03ee8-7859-4976-be77-54e193e997a1", "address": "fa:16:3e:f8:aa:49", "network": {"id": "91238196-dc46-43a7-b28d-69c5b89ab844", "bridge": "br-int", "label": "tempest-ServersTestJSON-1435761967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43bb0fe5b1c4383b5089eb59db781ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", 
"segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ef03ee8-78", "ovs_interfaceid": "6ef03ee8-7859-4976-be77-54e193e997a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.517206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-77fa5a89-961b-4c84-a75e-a5be0253677e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.517480] env[68674]: DEBUG nova.objects.instance [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lazy-loading 'migration_context' on Instance uuid 77fa5a89-961b-4c84-a75e-a5be0253677e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 734.570388] env[68674]: DEBUG nova.scheduler.client.report [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.782742] env[68674]: DEBUG nova.compute.manager [req-9efebb8f-164d-4af5-b5ea-3b745db3b53d req-7883f081-d15f-484c-9e9f-182e9b7e63b6 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Received event network-vif-deleted-1dd86562-12e7-45c7-9bc1-e9b4ed28e43e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.782947] env[68674]: INFO nova.compute.manager [req-9efebb8f-164d-4af5-b5ea-3b745db3b53d req-7883f081-d15f-484c-9e9f-182e9b7e63b6 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Neutron deleted interface 1dd86562-12e7-45c7-9bc1-e9b4ed28e43e; detaching it from the instance and deleting it from the info cache [ 734.783597] env[68674]: DEBUG nova.network.neutron [req-9efebb8f-164d-4af5-b5ea-3b745db3b53d req-7883f081-d15f-484c-9e9f-182e9b7e63b6 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.975233] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Releasing lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.975616] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Instance network_info: |[{"id": 
"6ef03ee8-7859-4976-be77-54e193e997a1", "address": "fa:16:3e:f8:aa:49", "network": {"id": "91238196-dc46-43a7-b28d-69c5b89ab844", "bridge": "br-int", "label": "tempest-ServersTestJSON-1435761967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43bb0fe5b1c4383b5089eb59db781ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ef03ee8-78", "ovs_interfaceid": "6ef03ee8-7859-4976-be77-54e193e997a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 734.976180] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:aa:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd70692eb-97b3-417c-a4ca-1ee888246ad9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ef03ee8-7859-4976-be77-54e193e997a1', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.984286] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Creating folder: Project (b43bb0fe5b1c4383b5089eb59db781ef). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.984406] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-308a5a3d-e197-43c9-a0b8-6329165c2f55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.995457] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Created folder: Project (b43bb0fe5b1c4383b5089eb59db781ef) in parent group-v647377. [ 734.995678] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Creating folder: Instances. Parent ref: group-v647508. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.995915] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab3dc9aa-6eb2-4890-8c5d-1ca69d194122 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.004166] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Created folder: Instances in parent group-v647508. [ 735.004506] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.004701] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.004896] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30d5636a-1dea-447b-bec4-0c72fa060722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.019869] env[68674]: DEBUG nova.objects.base [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Object Instance<77fa5a89-961b-4c84-a75e-a5be0253677e> lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 735.020749] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eeb8d3a-f31d-42c0-905f-fee66a739054 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.025157] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.025157] env[68674]: value = "task-3239849" [ 735.025157] env[68674]: _type = "Task" [ 735.025157] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.033811] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239849, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.049193] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b05c220c-5531-4866-a96c-5dc30d83c0aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.054347] env[68674]: DEBUG oslo_vmware.api [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 735.054347] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5202f3fd-1804-3143-92d2-357e501d56b9" [ 735.054347] env[68674]: _type = "Task" [ 735.054347] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.063537] env[68674]: DEBUG oslo_vmware.api [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5202f3fd-1804-3143-92d2-357e501d56b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.075864] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.076664] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.080935] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.510s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.081485] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.084568] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.337s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.086938] env[68674]: INFO nova.compute.claims [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.127334] env[68674]: INFO nova.scheduler.client.report [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted allocations for instance d167585b-11f4-462c-b12e-c6a440c1476a [ 735.270693] env[68674]: DEBUG nova.network.neutron [-] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.285785] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-8833fbf5-7be1-4423-9c0d-89d36b0e6f2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.294997] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ec5b18-f991-4ffd-bf44-2e2b50570df5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.342160] env[68674]: DEBUG nova.compute.manager [req-9efebb8f-164d-4af5-b5ea-3b745db3b53d req-7883f081-d15f-484c-9e9f-182e9b7e63b6 service nova] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Detach interface failed, port_id=1dd86562-12e7-45c7-9bc1-e9b4ed28e43e, reason: Instance 3c8459db-cc54-4644-8e4c-83c87017a186 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 735.391241] env[68674]: DEBUG nova.compute.manager [req-a500b15e-81ea-4c86-ac01-c88772489efe req-13c7e5bf-0fa2-4898-a1e4-adab8f2b74a5 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Received event network-vif-plugged-6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.391515] env[68674]: DEBUG oslo_concurrency.lockutils [req-a500b15e-81ea-4c86-ac01-c88772489efe req-13c7e5bf-0fa2-4898-a1e4-adab8f2b74a5 service nova] Acquiring lock "6803af03-b1d5-47e6-9471-5213469e4103-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.391929] env[68674]: DEBUG oslo_concurrency.lockutils [req-a500b15e-81ea-4c86-ac01-c88772489efe req-13c7e5bf-0fa2-4898-a1e4-adab8f2b74a5 service nova] Lock "6803af03-b1d5-47e6-9471-5213469e4103-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.391929] env[68674]: DEBUG oslo_concurrency.lockutils [req-a500b15e-81ea-4c86-ac01-c88772489efe req-13c7e5bf-0fa2-4898-a1e4-adab8f2b74a5 service nova] Lock "6803af03-b1d5-47e6-9471-5213469e4103-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.392483] env[68674]: DEBUG nova.compute.manager [req-a500b15e-81ea-4c86-ac01-c88772489efe req-13c7e5bf-0fa2-4898-a1e4-adab8f2b74a5 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] No waiting events found dispatching network-vif-plugged-6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 735.392483] env[68674]: WARNING nova.compute.manager [req-a500b15e-81ea-4c86-ac01-c88772489efe req-13c7e5bf-0fa2-4898-a1e4-adab8f2b74a5 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Received unexpected event network-vif-plugged-6670eed2-509b-430f-a00f-46293f18bba9 for instance with vm_state building and task_state spawning. 
[ 735.510576] env[68674]: DEBUG nova.network.neutron [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Successfully updated port: 6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.538591] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239849, 'name': CreateVM_Task, 'duration_secs': 0.315937} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.538778] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 735.539623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.539883] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.540138] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 735.540819] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7593257f-2089-4332-8b1d-591263028f2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.546320] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 735.546320] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523223ea-d380-33fa-65bb-bea0d5b673e0" [ 735.546320] env[68674]: _type = "Task" [ 735.546320] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.555312] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523223ea-d380-33fa-65bb-bea0d5b673e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.564484] env[68674]: DEBUG oslo_vmware.api [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5202f3fd-1804-3143-92d2-357e501d56b9, 'name': SearchDatastore_Task, 'duration_secs': 0.019902} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.564815] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.593553] env[68674]: DEBUG nova.compute.utils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 735.599265] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.599265] env[68674]: DEBUG nova.network.neutron [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.639158] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3df6c30b-92b4-4886-bc1f-a7bf5f789973 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "d167585b-11f4-462c-b12e-c6a440c1476a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.035s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.677651] env[68674]: DEBUG nova.policy [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cf958d2c6c14ab99c90d41a6f300d7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c87ef02334014ade962842a0b5ff355d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 735.775918] env[68674]: INFO nova.compute.manager [-] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Took 1.46 seconds to deallocate network for instance. 
[ 735.970097] env[68674]: DEBUG nova.network.neutron [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Successfully created port: 5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.014571] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "refresh_cache-6803af03-b1d5-47e6-9471-5213469e4103" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.014571] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired lock "refresh_cache-6803af03-b1d5-47e6-9471-5213469e4103" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.014571] env[68674]: DEBUG nova.network.neutron [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.056737] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523223ea-d380-33fa-65bb-bea0d5b673e0, 'name': SearchDatastore_Task, 'duration_secs': 0.010195} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.057062] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.057655] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 736.057778] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.057932] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.058157] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 736.058422] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73ac3941-41af-4d9a-a3b1-5ac5693052ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.066932] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 736.067138] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 736.067859] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6b3359-51e2-4496-9523-50e87464019f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.074037] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 736.074037] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52532cbd-2a37-7192-fe23-5f6136df6d4c" [ 736.074037] env[68674]: _type = "Task" [ 736.074037] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.080857] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52532cbd-2a37-7192-fe23-5f6136df6d4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.098092] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.281421] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.558979] env[68674]: DEBUG nova.network.neutron [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.591690] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52532cbd-2a37-7192-fe23-5f6136df6d4c, 'name': SearchDatastore_Task, 'duration_secs': 0.00929} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.592987] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80f9c7fb-a58e-4354-982f-1351c0157221 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.599984] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 736.599984] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52faef54-e0f5-ec47-d12f-38fdabce098f" [ 736.599984] env[68674]: _type = "Task" [ 736.599984] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.610381] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52faef54-e0f5-ec47-d12f-38fdabce098f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.637853] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87183d62-2542-464b-bcbe-a4f31c602d36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.645023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2489d5-3865-4dc0-8ba6-6b6bf887161f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.679750] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24010ce3-14ae-43c2-8825-2cbe284aaa2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.686373] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50eab276-8756-456f-a7f2-31c07d994083 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.699564] env[68674]: DEBUG nova.compute.provider_tree [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.767030] env[68674]: DEBUG nova.network.neutron [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Updating instance_info_cache with network_info: [{"id": "6670eed2-509b-430f-a00f-46293f18bba9", "address": "fa:16:3e:fd:cc:8f", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6670eed2-50", "ovs_interfaceid": "6670eed2-509b-430f-a00f-46293f18bba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.109246] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52faef54-e0f5-ec47-d12f-38fdabce098f, 'name': SearchDatastore_Task, 'duration_secs': 0.010394} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.109510] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.109871] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0eaf7d72-755b-4977-8f71-7d53ad1cf573/0eaf7d72-755b-4977-8f71-7d53ad1cf573.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.110033] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d1ad819-0793-4640-9d0e-a4bfcb996be4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.115094] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.118225] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 737.118225] env[68674]: value = "task-3239850" [ 737.118225] env[68674]: _type = "Task" [ 737.118225] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.126056] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239850, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.141245] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.141245] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.141245] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.141576] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.141576] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.141646] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.141842] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.142014] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.142197] env[68674]: 
DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.142364] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.142559] env[68674]: DEBUG nova.virt.hardware [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.143380] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0618784-44d2-4751-adab-76c0a6e802d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.150796] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb93a5b6-c225-49a4-bf76-c7b504db5370 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.203384] env[68674]: DEBUG nova.scheduler.client.report [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.269712] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Releasing lock "refresh_cache-6803af03-b1d5-47e6-9471-5213469e4103" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.270273] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Instance network_info: |[{"id": "6670eed2-509b-430f-a00f-46293f18bba9", "address": "fa:16:3e:fd:cc:8f", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6670eed2-50", "ovs_interfaceid": "6670eed2-509b-430f-a00f-46293f18bba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 737.270828] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:cc:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6670eed2-509b-430f-a00f-46293f18bba9', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.280531] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating folder: Project (c87ef02334014ade962842a0b5ff355d). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.280895] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-049d6e5b-0a22-4355-94eb-0a57e6f6187a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.291977] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Created folder: Project (c87ef02334014ade962842a0b5ff355d) in parent group-v647377. [ 737.292258] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating folder: Instances. Parent ref: group-v647511. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.292531] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b232cc7a-2ae0-4fa0-96e9-0021605a1484 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.302324] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Created folder: Instances in parent group-v647511. [ 737.302546] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.302789] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.303019] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ec03bb6-b3cc-4bed-b84f-7784b224fc98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.324850] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.324850] env[68674]: value = "task-3239853" [ 737.324850] env[68674]: _type = "Task" [ 737.324850] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.334324] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239853, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.431657] env[68674]: DEBUG nova.compute.manager [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Received event network-changed-6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.432129] env[68674]: DEBUG nova.compute.manager [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Refreshing instance network info cache due to event network-changed-6670eed2-509b-430f-a00f-46293f18bba9. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 737.432516] env[68674]: DEBUG oslo_concurrency.lockutils [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] Acquiring lock "refresh_cache-6803af03-b1d5-47e6-9471-5213469e4103" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.432957] env[68674]: DEBUG oslo_concurrency.lockutils [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] Acquired lock "refresh_cache-6803af03-b1d5-47e6-9471-5213469e4103" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.433327] env[68674]: DEBUG nova.network.neutron [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Refreshing network info cache for port 6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.634460] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239850, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.642687] env[68674]: DEBUG nova.network.neutron [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Successfully updated port: 5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 737.709362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.711274] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 737.713230] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.477s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.713230] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.716167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.459s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.717177] env[68674]: INFO nova.compute.claims [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.742082] env[68674]: INFO nova.scheduler.client.report [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Deleted allocations for instance 0f618d12-dc7b-4739-8ace-9453a7175d75 [ 737.835117] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239853, 'name': CreateVM_Task, 'duration_secs': 0.410679} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.835117] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.837897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.837897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.837897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 737.837897] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23f87a14-fab4-45c2-b944-8a2e15003fd5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.841281] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 737.841281] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523a3121-a4be-883f-07ea-2c38a0a590c3" [ 737.841281] env[68674]: _type = "Task" [ 737.841281] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.851603] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523a3121-a4be-883f-07ea-2c38a0a590c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.130666] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550065} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.130666] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0eaf7d72-755b-4977-8f71-7d53ad1cf573/0eaf7d72-755b-4977-8f71-7d53ad1cf573.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.130666] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.130666] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e6286a3-8ada-4362-ba9e-009807c4db97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.136465] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 738.136465] env[68674]: value = "task-3239854" [ 738.136465] env[68674]: _type = "Task" [ 738.136465] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.148974] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "refresh_cache-3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.149193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired lock "refresh_cache-3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.149348] env[68674]: DEBUG nova.network.neutron [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.150653] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239854, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.223556] env[68674]: DEBUG nova.compute.utils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.227651] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 738.227837] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.242824] env[68674]: DEBUG nova.network.neutron [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Updated VIF entry in instance network info cache for port 6670eed2-509b-430f-a00f-46293f18bba9. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 738.242824] env[68674]: DEBUG nova.network.neutron [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Updating instance_info_cache with network_info: [{"id": "6670eed2-509b-430f-a00f-46293f18bba9", "address": "fa:16:3e:fd:cc:8f", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6670eed2-50", "ovs_interfaceid": "6670eed2-509b-430f-a00f-46293f18bba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.250733] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dcabb9af-8eba-4b2e-8020-70ff5c57acbc tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "0f618d12-dc7b-4739-8ace-9453a7175d75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.911s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.298912] env[68674]: DEBUG nova.policy [None 
req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26dfab4e06f2403697a602676410bbe7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a52a0d746a4f16b5e4d5a6c984cc45', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 738.353560] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523a3121-a4be-883f-07ea-2c38a0a590c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009226} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.353905] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.354177] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.354440] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.354620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.354839] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.355155] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8568d0b7-232e-4b1c-90d8-8ae911ab7410 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.366149] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.366351] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.367213] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0550d81d-3bed-4daf-9046-9daff11fd5c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.375222] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 738.375222] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521d9d06-9520-3dad-01d4-9c81e211790d" [ 738.375222] env[68674]: _type = "Task" [ 738.375222] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.383944] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521d9d06-9520-3dad-01d4-9c81e211790d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.647156] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239854, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097957} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.647992] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.648819] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9958afbc-cb6b-41e1-bca4-4e5b37426113 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.678188] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 0eaf7d72-755b-4977-8f71-7d53ad1cf573/0eaf7d72-755b-4977-8f71-7d53ad1cf573.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.678890] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9108a85-533d-4601-a1a7-4e267964edcd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.693691] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Successfully created port: 8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.695895] env[68674]: DEBUG nova.network.neutron [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.703075] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 738.703075] env[68674]: value = "task-3239855" [ 738.703075] env[68674]: _type = "Task" [ 738.703075] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.712909] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239855, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.732921] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 738.745418] env[68674]: DEBUG oslo_concurrency.lockutils [req-28040d9c-c097-4713-8db3-a1d199020190 req-e79cced0-8ba0-44ef-85cf-12d5c2636a0a service nova] Releasing lock "refresh_cache-6803af03-b1d5-47e6-9471-5213469e4103" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.868095] env[68674]: DEBUG nova.network.neutron [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Updating instance_info_cache with network_info: [{"id": "5624a182-6a62-46b8-b456-e3e59a2ed84e", "address": "fa:16:3e:24:64:42", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5624a182-6a", "ovs_interfaceid": "5624a182-6a62-46b8-b456-e3e59a2ed84e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.894897] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521d9d06-9520-3dad-01d4-9c81e211790d, 'name': SearchDatastore_Task, 'duration_secs': 0.028571} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.896792] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e20e367-1e40-4a4a-a90a-aded1482501d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.903203] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 738.903203] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5275daa7-f983-2dbf-c5c3-6cce1db687b1" [ 738.903203] env[68674]: _type = "Task" [ 738.903203] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.914475] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5275daa7-f983-2dbf-c5c3-6cce1db687b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.133433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "357b515d-ef37-4688-969e-f894be30edb7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.133538] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "357b515d-ef37-4688-969e-f894be30edb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.133709] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "357b515d-ef37-4688-969e-f894be30edb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.133918] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "357b515d-ef37-4688-969e-f894be30edb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.134098] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "357b515d-ef37-4688-969e-f894be30edb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.138914] env[68674]: INFO nova.compute.manager [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Terminating instance [ 739.215557] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239855, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.291313] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquiring lock "6278d756-139c-4fcd-bf31-304c978d6682" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.291595] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "6278d756-139c-4fcd-bf31-304c978d6682" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.291805] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquiring lock "6278d756-139c-4fcd-bf31-304c978d6682-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.291978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "6278d756-139c-4fcd-bf31-304c978d6682-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.292155] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "6278d756-139c-4fcd-bf31-304c978d6682-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.294168] env[68674]: INFO nova.compute.manager [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Terminating instance [ 739.303834] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fa206c-6925-499c-86fc-4888bf7219c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.312660] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48daea5d-e701-4ae0-a3b6-7a328a5f9f7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.344797] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fccdafd-7506-49cd-9a96-f64fe9bbea05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.352640] 
env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99df412c-4916-4bae-9a80-c4182f5ce4f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.366685] env[68674]: DEBUG nova.compute.provider_tree [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.372723] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Releasing lock "refresh_cache-3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.373376] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Instance network_info: |[{"id": "5624a182-6a62-46b8-b456-e3e59a2ed84e", "address": "fa:16:3e:24:64:42", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5624a182-6a", "ovs_interfaceid": "5624a182-6a62-46b8-b456-e3e59a2ed84e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 739.373749] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:64:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5624a182-6a62-46b8-b456-e3e59a2ed84e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.380901] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.381714] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.381927] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88885c50-eb5d-4481-ac8b-eb9d2084ee76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.403852] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.403852] env[68674]: value = "task-3239856" [ 739.403852] env[68674]: _type = "Task" [ 739.403852] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.415968] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5275daa7-f983-2dbf-c5c3-6cce1db687b1, 'name': SearchDatastore_Task, 'duration_secs': 0.016163} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.419007] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.419357] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 6803af03-b1d5-47e6-9471-5213469e4103/6803af03-b1d5-47e6-9471-5213469e4103.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.419577] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239856, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.419800] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4db88c9-8804-4500-afb2-220c5466a439 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.429660] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 739.429660] env[68674]: value = "task-3239857" [ 739.429660] env[68674]: _type = "Task" [ 739.429660] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.437472] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.462261] env[68674]: DEBUG nova.compute.manager [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Received event network-vif-plugged-5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.463301] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] Acquiring lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.463301] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.463301] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.463301] env[68674]: DEBUG nova.compute.manager [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] No waiting events found dispatching network-vif-plugged-5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 739.463778] env[68674]: WARNING nova.compute.manager [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Received unexpected event network-vif-plugged-5624a182-6a62-46b8-b456-e3e59a2ed84e for instance with vm_state building and task_state spawning. [ 739.463890] env[68674]: DEBUG nova.compute.manager [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Received event network-changed-5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.464159] env[68674]: DEBUG nova.compute.manager [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Refreshing instance network info cache due to event network-changed-5624a182-6a62-46b8-b456-e3e59a2ed84e. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 739.464479] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] Acquiring lock "refresh_cache-3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.464704] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] Acquired lock "refresh_cache-3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.465032] env[68674]: DEBUG nova.network.neutron [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Refreshing network info cache for port 5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.642402] env[68674]: DEBUG nova.compute.manager [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.642674] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.643620] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97195aa6-295b-466f-b259-251714fe9113 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.653801] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.654230] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c3fed0f-aca4-4f59-bb00-4e69152b1f7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.661434] env[68674]: DEBUG oslo_vmware.api [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 739.661434] env[68674]: value = "task-3239858" [ 739.661434] env[68674]: _type = "Task" [ 739.661434] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.671696] env[68674]: DEBUG oslo_vmware.api [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.717482] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239855, 'name': ReconfigVM_Task, 'duration_secs': 0.845563} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.717828] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 0eaf7d72-755b-4977-8f71-7d53ad1cf573/0eaf7d72-755b-4977-8f71-7d53ad1cf573.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.718576] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38cddf45-2519-4a7b-a29b-3f1a9cd8e8c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.729491] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 739.729491] env[68674]: value = "task-3239859" [ 739.729491] env[68674]: _type = "Task" [ 739.729491] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.741333] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239859, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.750833] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 739.783014] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 739.783584] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.783810] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 739.784117] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.784399] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 739.784539] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 739.784802] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 739.785053] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 739.785301] env[68674]: DEBUG nova.virt.hardware [None 
req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 739.785498] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 739.785748] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 739.786762] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d971d771-35e3-470a-9a16-c37be98ca45b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.799736] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4400a5fc-db1f-4bcf-8987-2d3ce26cf7a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.805116] env[68674]: DEBUG nova.compute.manager [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.805442] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.805719] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-445521a0-6b8e-4314-8636-0423e4d0424f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.813062] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 739.813062] env[68674]: value = "task-3239860" [ 739.813062] env[68674]: _type = "Task" [ 739.813062] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.832278] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239860, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.869855] env[68674]: DEBUG nova.scheduler.client.report [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.917271] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239856, 'name': CreateVM_Task, 'duration_secs': 0.51089} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.917502] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.918199] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.918362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.918690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 739.918950] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-073ef245-e0bc-4fd7-909b-87325027c683 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.926141] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 739.926141] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520ebeda-9383-41af-e6dc-ed0b2502fe80" [ 739.926141] env[68674]: _type = "Task" [ 739.926141] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.936455] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520ebeda-9383-41af-e6dc-ed0b2502fe80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.942140] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499834} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.942425] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 6803af03-b1d5-47e6-9471-5213469e4103/6803af03-b1d5-47e6-9471-5213469e4103.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.942648] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.942909] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d7cf0f5-66dc-4d79-a8d3-6c617b972964 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.949367] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 739.949367] env[68674]: value = "task-3239861" [ 739.949367] env[68674]: _type = "Task" [ 739.949367] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.959163] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239861, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.832950] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Successfully updated port: 8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 740.837088] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.121s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.837227] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.840678] env[68674]: DEBUG nova.compute.manager [req-645da229-5577-4b0b-9b26-fd00706b24a0 req-48f9ade3-9e77-4d46-8f36-4ecb7e8ace27 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Received event network-vif-plugged-8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 740.841037] env[68674]: DEBUG oslo_concurrency.lockutils [req-645da229-5577-4b0b-9b26-fd00706b24a0 req-48f9ade3-9e77-4d46-8f36-4ecb7e8ace27 service nova] Acquiring lock "1699f556-d451-40e3-a213-7edb753b03f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.843856] env[68674]: DEBUG oslo_concurrency.lockutils [req-645da229-5577-4b0b-9b26-fd00706b24a0 req-48f9ade3-9e77-4d46-8f36-4ecb7e8ace27 service nova] Lock "1699f556-d451-40e3-a213-7edb753b03f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.843856] env[68674]: DEBUG oslo_concurrency.lockutils [req-645da229-5577-4b0b-9b26-fd00706b24a0 req-48f9ade3-9e77-4d46-8f36-4ecb7e8ace27 service nova] Lock "1699f556-d451-40e3-a213-7edb753b03f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.843856] env[68674]: DEBUG nova.compute.manager [req-645da229-5577-4b0b-9b26-fd00706b24a0 req-48f9ade3-9e77-4d46-8f36-4ecb7e8ace27 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] No waiting events found dispatching network-vif-plugged-8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 740.843856] env[68674]: WARNING nova.compute.manager [req-645da229-5577-4b0b-9b26-fd00706b24a0 req-48f9ade3-9e77-4d46-8f36-4ecb7e8ace27 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Received unexpected event network-vif-plugged-8ccc7517-d8c1-44f5-a6ef-52fd819f4904 for instance with vm_state 
building and task_state spawning. [ 740.848074] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.109s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.848289] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.850154] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.100s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.850344] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.852128] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.863s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.853746] env[68674]: INFO nova.compute.claims [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.869748] env[68674]: DEBUG oslo_vmware.api [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239858, 'name': PowerOffVM_Task, 'duration_secs': 0.333367} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.871821] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.873526] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.881306] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c29e28d-0e48-49f9-b388-c29c1de04343 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.883331] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520ebeda-9383-41af-e6dc-ed0b2502fe80, 'name': SearchDatastore_Task, 'duration_secs': 0.026781} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.883551] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239860, 'name': PowerOffVM_Task, 'duration_secs': 0.264615} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.884112] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239859, 'name': Rename_Task, 'duration_secs': 0.262038} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.884748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.884960] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 740.885202] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.885345] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.885518] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 740.885860] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.885975] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 740.886168] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647422', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'name': 'volume-6312e471-ebb6-489c-a453-cfed8d42b5ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6278d756-139c-4fcd-bf31-304c978d6682', 'attached_at': '', 'detached_at': '', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'serial': '6312e471-ebb6-489c-a453-cfed8d42b5ac'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 740.886428] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.889540] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc8fd326-2c3c-4b89-ba54-82f43b336d09 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.891839] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecf8c5b-306f-4efd-9d82-1b50700ba61a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.894601] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f82c40c-097a-4281-8206-9f0b6e8a47ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.896098] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098409} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.900019] env[68674]: INFO nova.scheduler.client.report [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Deleted allocations for instance 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8 [ 740.900019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.900019] env[68674]: INFO nova.scheduler.client.report [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Deleted allocations for instance 50bb7509-b7e9-4dc3-9746-acd46010cc26 [ 740.903625] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55c6eac-10fc-4ec2-ba90-cb60fff70201 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.925601] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 740.925601] env[68674]: value = "task-3239863" [ 740.925601] env[68674]: _type = "Task" [ 740.925601] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.927906] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e055ef-bd58-48ea-88d1-495d8cb13809 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.930729] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 740.930916] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 740.945048] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74084124-3397-4107-ae19-6f11902e4666 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.957083] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 6803af03-b1d5-47e6-9471-5213469e4103/6803af03-b1d5-47e6-9471-5213469e4103.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.963075] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19c88a7a-b7d4-44f6-8280-8904efaaf907 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.980625] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.980846] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.981038] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Deleting the datastore file [datastore1] 357b515d-ef37-4688-969e-f894be30edb7 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.982052] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-471e0c80-741a-4831-ae3d-303bbe0f2e6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.984361] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba263882-1e0c-4208-a8d3-5f6433e42123 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.991823] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 740.991823] env[68674]: value = "task-3239864" [ 740.991823] env[68674]: _type = "Task" [ 740.991823] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.992118] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239863, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.992332] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 740.992332] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f0611-3f85-c396-9197-d75c084482cc" [ 740.992332] env[68674]: _type = "Task" [ 740.992332] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.012370] env[68674]: DEBUG oslo_vmware.api [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for the task: (returnval){ [ 741.012370] env[68674]: value = "task-3239865" [ 741.012370] env[68674]: _type = "Task" [ 741.012370] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.014222] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87914046-dda6-4190-ac7c-62ce0ea5d0e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.026055] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f0611-3f85-c396-9197-d75c084482cc, 'name': SearchDatastore_Task, 'duration_secs': 0.017413} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.026288] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.027845] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00bd5efc-218a-423f-af18-69305988c41d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.030532] env[68674]: DEBUG nova.network.neutron [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Updated VIF entry in instance network info cache for port 5624a182-6a62-46b8-b456-e3e59a2ed84e. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 741.031109] env[68674]: DEBUG nova.network.neutron [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Updating instance_info_cache with network_info: [{"id": "5624a182-6a62-46b8-b456-e3e59a2ed84e", "address": "fa:16:3e:24:64:42", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5624a182-6a", "ovs_interfaceid": "5624a182-6a62-46b8-b456-e3e59a2ed84e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.047023] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] The volume has not been displaced from its original location: [datastore2] volume-6312e471-ebb6-489c-a453-cfed8d42b5ac/volume-6312e471-ebb6-489c-a453-cfed8d42b5ac.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 741.052357] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Reconfiguring VM instance instance-00000021 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 741.052748] env[68674]: DEBUG oslo_vmware.api [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239865, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.053500] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e92d96c-80f4-47e5-8769-1487a4029a48 req-b7dee1d7-4980-4749-adb7-17be1f930d88 service nova] Releasing lock "refresh_cache-3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.054656] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ba79ab9-4f49-4d04-9ea8-d8be8411377a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.069656] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 741.069656] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5260f9ea-d7a3-ef3f-84c6-1882cb8e36d0" [ 741.069656] env[68674]: _type = "Task" [ 741.069656] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.075549] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 741.075549] env[68674]: value = "task-3239866" [ 741.075549] env[68674]: _type = "Task" [ 741.075549] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.085991] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5260f9ea-d7a3-ef3f-84c6-1882cb8e36d0, 'name': SearchDatastore_Task, 'duration_secs': 0.020548} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.089039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.089301] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f/3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 741.089727] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239866, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.089929] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6af281f9-b194-4281-b411-fb17dd973a7c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.095733] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 741.095733] env[68674]: value = "task-3239867" [ 741.095733] env[68674]: _type = "Task" [ 741.095733] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.104103] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.343112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "refresh_cache-1699f556-d451-40e3-a213-7edb753b03f1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.344467] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "refresh_cache-1699f556-d451-40e3-a213-7edb753b03f1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.344467] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.365846] env[68674]: DEBUG nova.compute.utils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.374085] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 741.374836] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.435629] env[68674]: DEBUG nova.policy [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26dfab4e06f2403697a602676410bbe7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a52a0d746a4f16b5e4d5a6c984cc45', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 741.438595] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c68cd6cd-e996-4619-ab4d-f9499c53b819 tempest-ServerMetadataNegativeTestJSON-769964779 tempest-ServerMetadataNegativeTestJSON-769964779-project-member] Lock "3b0837ef-53fb-4851-b69f-ee0a1d89fbf8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.669s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.455172] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239863, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.464633] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6de054e6-7a4e-4b96-87eb-26d0facf85d5 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "50bb7509-b7e9-4dc3-9746-acd46010cc26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.462s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.504465] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.525880] env[68674]: DEBUG oslo_vmware.api [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Task: {'id': task-3239865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372494} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.525999] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.526210] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.526424] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.526641] env[68674]: INFO nova.compute.manager [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Took 1.88 seconds to destroy the instance on the hypervisor. [ 741.526894] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.527120] env[68674]: DEBUG nova.compute.manager [-] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.527259] env[68674]: DEBUG nova.network.neutron [-] [instance: 357b515d-ef37-4688-969e-f894be30edb7] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.588113] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239866, 'name': ReconfigVM_Task, 'duration_secs': 0.299635} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.588907] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Reconfigured VM instance instance-00000021 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 741.594631] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ba6876b-4f8b-443d-ba41-2f561d829212 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.616212] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239867, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.618092] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 741.618092] env[68674]: value = "task-3239868" [ 741.618092] env[68674]: _type = "Task" [ 741.618092] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.628792] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239868, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.881026] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.895450] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.952159] env[68674]: DEBUG oslo_vmware.api [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3239863, 'name': PowerOnVM_Task, 'duration_secs': 0.731594} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.952159] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.952159] env[68674]: INFO nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Took 10.30 seconds to spawn the instance on the hypervisor. [ 741.952159] env[68674]: DEBUG nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 741.952492] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a94c30a-350d-4d35-bb7a-11fa3daea016 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.970518] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Successfully created port: 52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.002937] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239864, 'name': ReconfigVM_Task, 'duration_secs': 0.587057} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.003477] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 6803af03-b1d5-47e6-9471-5213469e4103/6803af03-b1d5-47e6-9471-5213469e4103.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.006517] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c460f9c-f84b-4076-9fa2-59b832e92a85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.016757] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 742.016757] env[68674]: value = "task-3239869" [ 742.016757] env[68674]: _type = "Task" [ 742.016757] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.028498] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239869, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.116639] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688216} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.116932] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f/3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 742.117160] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 742.117706] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4022c06-3180-405e-97b7-f02090213c28 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.131335] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239868, 'name': ReconfigVM_Task, 'duration_secs': 0.182699} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.132694] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647422', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'name': 'volume-6312e471-ebb6-489c-a453-cfed8d42b5ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6278d756-139c-4fcd-bf31-304c978d6682', 'attached_at': '', 'detached_at': '', 'volume_id': '6312e471-ebb6-489c-a453-cfed8d42b5ac', 'serial': '6312e471-ebb6-489c-a453-cfed8d42b5ac'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 742.133050] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.133396] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 742.133396] env[68674]: value = "task-3239870" [ 742.133396] env[68674]: _type = "Task" [ 742.133396] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.136840] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176427fd-8a3e-4750-bb97-59d58c565eb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.145660] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Updating instance_info_cache with network_info: [{"id": "8ccc7517-d8c1-44f5-a6ef-52fd819f4904", "address": "fa:16:3e:50:2c:57", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ccc7517-d8", "ovs_interfaceid": "8ccc7517-d8c1-44f5-a6ef-52fd819f4904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.152365] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.153667] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5cea2ac8-7cb3-4d6b-a748-f2cc406fcf5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.155728] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239870, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.252974] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 742.252974] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 742.252974] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Deleting the datastore file [datastore2] 6278d756-139c-4fcd-bf31-304c978d6682 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.252974] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd0a72db-3f95-4fdd-b547-9dda40621066 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.258923] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for the task: (returnval){ [ 742.258923] env[68674]: value = "task-3239872" [ 742.258923] env[68674]: _type = "Task" [ 742.258923] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.272289] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239872, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.349629] env[68674]: DEBUG nova.compute.manager [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Received event network-changed-8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 742.349921] env[68674]: DEBUG nova.compute.manager [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Refreshing instance network info cache due to event network-changed-8ccc7517-d8c1-44f5-a6ef-52fd819f4904. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 742.350139] env[68674]: DEBUG oslo_concurrency.lockutils [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] Acquiring lock "refresh_cache-1699f556-d451-40e3-a213-7edb753b03f1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.485442] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9144a98-379c-4f98-8419-699bf82b64e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.493792] env[68674]: INFO nova.compute.manager [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Took 65.12 seconds to build instance. [ 742.499527] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ae5d3b-66d1-4268-bc9e-26093793b1df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.503873] env[68674]: DEBUG nova.network.neutron [-] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.536714] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b937665c-5217-4905-a37c-6d78df5ad748 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.544796] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239869, 'name': Rename_Task, 'duration_secs': 0.385255} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.547223] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.547514] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d535dd1-3acd-4c21-85a4-49b7ef27a679 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.550250] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dcdb54-f093-44f0-ab82-4950f6fe7703 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.566651] env[68674]: DEBUG nova.compute.provider_tree [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.569077] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 742.569077] env[68674]: value = "task-3239873" [ 742.569077] env[68674]: _type = "Task" [ 742.569077] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.581019] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239873, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.649070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "refresh_cache-1699f556-d451-40e3-a213-7edb753b03f1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.649414] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Instance network_info: |[{"id": "8ccc7517-d8c1-44f5-a6ef-52fd819f4904", "address": "fa:16:3e:50:2c:57", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ccc7517-d8", "ovs_interfaceid": "8ccc7517-d8c1-44f5-a6ef-52fd819f4904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 742.649713] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.215476} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.649937] env[68674]: DEBUG oslo_concurrency.lockutils [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] Acquired lock "refresh_cache-1699f556-d451-40e3-a213-7edb753b03f1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.650141] env[68674]: DEBUG nova.network.neutron [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Refreshing network info cache for port 8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.651446] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:2c:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48512b02-ad5c-4105-ba7d-fd4775acf8e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ccc7517-d8c1-44f5-a6ef-52fd819f4904', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.658965] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Creating folder: Project (38a52a0d746a4f16b5e4d5a6c984cc45). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.659286] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 742.662369] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f2ecaa1-4991-4b8f-acae-d3c89ba1e4aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.664577] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cc0557-6bff-4cb1-9fd8-7867a6fcbd34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.688988] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f/3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.690821] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b60adc3-91a3-48b2-b6c1-4d07a0f75b3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.705474] env[68674]: INFO 
nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Created folder: Project (38a52a0d746a4f16b5e4d5a6c984cc45) in parent group-v647377. [ 742.705678] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Creating folder: Instances. Parent ref: group-v647515. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 742.708068] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30328f05-45b7-4ac4-8656-a5394dbb2d80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.715643] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 742.715643] env[68674]: value = "task-3239876" [ 742.715643] env[68674]: _type = "Task" [ 742.715643] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.723277] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Created folder: Instances in parent group-v647515. [ 742.724060] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.726591] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.726841] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239876, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.727061] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb642331-770d-4c43-92ec-555c399b7ee4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.748418] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.748418] env[68674]: value = "task-3239877" [ 742.748418] env[68674]: _type = "Task" [ 742.748418] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.757478] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239877, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.767888] env[68674]: DEBUG oslo_vmware.api [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Task: {'id': task-3239872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146571} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.768160] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.768409] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.768534] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.768689] env[68674]: INFO nova.compute.manager [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Took 2.96 seconds to destroy the instance on the hypervisor. [ 742.768947] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.769160] env[68674]: DEBUG nova.compute.manager [-] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 742.769303] env[68674]: DEBUG nova.network.neutron [-] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.900023] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.935140] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.935465] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.935571] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.935771] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.935919] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.936093] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.936314] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.936972] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.937590] env[68674]: DEBUG nova.virt.hardware [None 
req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.937662] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.937799] env[68674]: DEBUG nova.virt.hardware [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.939123] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61c6030-890c-4128-a821-fee681aca419 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.947676] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47462133-5f49-4381-acd3-8e7291779cca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.993870] env[68674]: DEBUG nova.network.neutron [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Updated VIF entry in instance network info cache for port 8ccc7517-d8c1-44f5-a6ef-52fd819f4904. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.994113] env[68674]: DEBUG nova.network.neutron [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Updating instance_info_cache with network_info: [{"id": "8ccc7517-d8c1-44f5-a6ef-52fd819f4904", "address": "fa:16:3e:50:2c:57", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ccc7517-d8", "ovs_interfaceid": "8ccc7517-d8c1-44f5-a6ef-52fd819f4904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.000337] env[68674]: DEBUG oslo_concurrency.lockutils [None req-55da8e92-0867-4dae-8905-3ce82d57f5a3 tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.607s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.007822] env[68674]: INFO nova.compute.manager [-] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Took 1.48 seconds to deallocate network for instance. [ 743.070865] env[68674]: DEBUG nova.scheduler.client.report [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.088282] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239873, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.229073] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239876, 'name': ReconfigVM_Task, 'duration_secs': 0.365729} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.229073] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f/3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.229073] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b47e3ba-1481-4262-ab4f-b4a698a7c1ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.237218] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 743.237218] env[68674]: value = "task-3239878" [ 743.237218] env[68674]: _type = "Task" [ 743.237218] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.246361] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239878, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.263907] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239877, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.498613] env[68674]: DEBUG oslo_concurrency.lockutils [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] Releasing lock "refresh_cache-1699f556-d451-40e3-a213-7edb753b03f1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.498982] env[68674]: DEBUG nova.compute.manager [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Received event network-vif-deleted-4f848177-8140-4862-a7f0-f901b045c157 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.499216] env[68674]: INFO nova.compute.manager [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Neutron deleted interface 4f848177-8140-4862-a7f0-f901b045c157; detaching it from the instance and deleting it from the info cache [ 743.499302] env[68674]: DEBUG nova.network.neutron [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.504988] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.514141] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.584124] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.584668] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.590588] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.055s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.590588] env[68674]: DEBUG nova.objects.instance [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 743.597025] env[68674]: DEBUG oslo_vmware.api [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239873, 'name': PowerOnVM_Task, 'duration_secs': 0.739584} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.598486] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.598682] env[68674]: INFO nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Took 9.23 seconds to spawn the instance on the hypervisor. [ 743.598857] env[68674]: DEBUG nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.599963] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa2d85f-5262-43e8-b8d7-d9d0f0e3411c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.751018] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239878, 'name': Rename_Task, 'duration_secs': 0.208903} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.751018] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.751018] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c90af9d-f70c-40ce-80c4-1b00285b4da3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.764026] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239877, 'name': CreateVM_Task, 'duration_secs': 0.58755} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.764026] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 743.764026] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 743.764026] env[68674]: value = "task-3239879" [ 743.764026] env[68674]: _type = "Task" [ 743.764026] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.764026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.764026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.764458] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 743.764458] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ee87495-27ae-4fd9-8096-94ae96114f99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.774914] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 743.774914] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f38772-c9bd-a9e6-7c01-455f8f38022c" [ 743.774914] env[68674]: _type = "Task" [ 
743.774914] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.784858] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239879, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.793535] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Successfully updated port: 52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.800742] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f38772-c9bd-a9e6-7c01-455f8f38022c, 'name': SearchDatastore_Task, 'duration_secs': 0.017347} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.801330] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.802148] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.802493] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.802732] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.802975] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.805508] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49858e7d-eacb-40b9-9c00-d46cee2e20ce 
{{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.808132] env[68674]: DEBUG nova.compute.manager [req-03b7dff2-42b1-4f95-abad-5767e3a4b31a req-e6026ed2-2e5c-412b-a1dc-91aab7d803ad service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Received event network-vif-plugged-52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.808363] env[68674]: DEBUG oslo_concurrency.lockutils [req-03b7dff2-42b1-4f95-abad-5767e3a4b31a req-e6026ed2-2e5c-412b-a1dc-91aab7d803ad service nova] Acquiring lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.808588] env[68674]: DEBUG oslo_concurrency.lockutils [req-03b7dff2-42b1-4f95-abad-5767e3a4b31a req-e6026ed2-2e5c-412b-a1dc-91aab7d803ad service nova] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.808769] env[68674]: DEBUG oslo_concurrency.lockutils [req-03b7dff2-42b1-4f95-abad-5767e3a4b31a req-e6026ed2-2e5c-412b-a1dc-91aab7d803ad service nova] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.809115] env[68674]: DEBUG nova.compute.manager [req-03b7dff2-42b1-4f95-abad-5767e3a4b31a req-e6026ed2-2e5c-412b-a1dc-91aab7d803ad service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] No waiting events found dispatching network-vif-plugged-52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 743.809414] env[68674]: WARNING nova.compute.manager [req-03b7dff2-42b1-4f95-abad-5767e3a4b31a req-e6026ed2-2e5c-412b-a1dc-91aab7d803ad service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Received unexpected event network-vif-plugged-52eba913-88d6-4c13-94bf-ad6cc7976b07 for instance with vm_state building and task_state spawning. [ 743.817450] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.817715] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.818757] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5883fc49-a583-41e8-83cf-9b9f603b9c06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.825076] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 743.825076] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527e8f62-1346-060a-34c3-a361ab8e8406" [ 743.825076] env[68674]: _type = "Task" [ 743.825076] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.833752] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527e8f62-1346-060a-34c3-a361ab8e8406, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.965846] env[68674]: INFO nova.compute.manager [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Rebuilding instance [ 744.003521] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5be52f23-f614-4bd3-9a3e-0e52aa3eaaa1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.020911] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75329c7-5ce3-4568-9fd2-502762876e55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.039919] env[68674]: DEBUG nova.compute.manager [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.040770] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f9adce-c339-4a79-9262-a1f3bb911fad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.050462] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.063707] env[68674]: DEBUG nova.compute.manager [req-10b5b09a-a5f0-407c-bc8b-2c91306c4d09 req-edecfb46-bc62-4a13-854f-e5ad373659fb service nova] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Detach interface failed, port_id=4f848177-8140-4862-a7f0-f901b045c157, reason: Instance 357b515d-ef37-4688-969e-f894be30edb7 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 744.093996] env[68674]: DEBUG nova.compute.utils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.100627] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.100830] env[68674]: DEBUG nova.network.neutron [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.122971] env[68674]: INFO nova.compute.manager [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Took 53.66 seconds to build instance. [ 744.155698] env[68674]: DEBUG nova.policy [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53d75d4514d745678ca7db4bbc7e596d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30b0da251a0d4f9c96f907b31ef9d5e0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.286319] env[68674]: DEBUG oslo_vmware.api [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239879, 'name': PowerOnVM_Task, 'duration_secs': 0.480047} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.286319] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.286319] env[68674]: INFO nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Took 7.17 seconds to spawn the instance on the hypervisor. 
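The repeated "Task: {'id': task-..., 'name': ...} progress is N%" and "... completed successfully" entries throughout this log come from the compute driver polling long-running vCenter tasks (PowerOnVM_Task, CreateVM_Task, Rename_Task, CopyVirtualDisk_Task and so on) until they reach a terminal state; in Nova this is done by oslo.vmware's wait_for_task/_poll_task (the api.py paths visible in the entries above). The sketch below is only a minimal, self-contained illustration of that generic poll-until-done pattern under assumed names — poll_until_done, fetch_task_info and TaskFailed are hypothetical stand-ins, not the oslo.vmware API or its signatures.

    import logging
    import time

    LOG = logging.getLogger(__name__)


    class TaskFailed(Exception):
        """Raised when a polled task reports an error state or times out."""


    def poll_until_done(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
        """Poll fetch_task_info(task_id) until the task finishes.

        fetch_task_info is a caller-supplied callable returning a dict such as
        {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
        Progress is logged on each poll, mirroring the "progress is N%" lines.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info(task_id)
            state = info.get('state')
            if state == 'success':
                LOG.debug("Task %s completed successfully.", task_id)
                return info
            if state == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            LOG.debug("Task %s progress is %s%%.", task_id, info.get('progress', 0))
            if time.monotonic() > deadline:
                raise TaskFailed("timed out waiting for task %s" % task_id)
            time.sleep(poll_interval)


    # Usage with a fake in-memory task that finishes on the third poll:
    _calls = {'n': 0}

    def fake_fetch(task_id):
        _calls['n'] += 1
        if _calls['n'] < 3:
            return {'state': 'running', 'progress': 33 * _calls['n']}
        return {'state': 'success', 'progress': 100}

    poll_until_done(fake_fetch, 'task-demo', poll_interval=0.01)

The real implementation additionally runs the loop on a periodic timer and propagates the vCenter task result object; the sketch only captures the poll/log/terminal-state structure that produces the log entries seen here.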
[ 744.286319] env[68674]: DEBUG nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.286319] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a01e037-d758-4dad-b94c-73865aad72b4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.305243] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "refresh_cache-0e3c27fe-a2d9-45dc-9559-a678f90a6fef" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.305243] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "refresh_cache-0e3c27fe-a2d9-45dc-9559-a678f90a6fef" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.305243] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.305243] env[68674]: DEBUG nova.network.neutron [-] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.345182] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527e8f62-1346-060a-34c3-a361ab8e8406, 'name': SearchDatastore_Task, 'duration_secs': 0.029759} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.345182] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc12fe90-32e7-4b99-a176-19d54011e878 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.351285] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 744.351285] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a035b5-8474-c670-3a7a-16651c39d9ab" [ 744.351285] env[68674]: _type = "Task" [ 744.351285] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.362059] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a035b5-8474-c670-3a7a-16651c39d9ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.596773] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.604498] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a04a478d-0370-4b39-8334-cc8fb535c6d6 tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.606034] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.165s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.606034] env[68674]: DEBUG nova.objects.instance [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 744.610942] env[68674]: DEBUG nova.compute.manager [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Received event network-changed-6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 744.611116] env[68674]: DEBUG nova.compute.manager [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Refreshing instance network info cache due to event network-changed-6ef03ee8-7859-4976-be77-54e193e997a1. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 744.611461] env[68674]: DEBUG oslo_concurrency.lockutils [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] Acquiring lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.611697] env[68674]: DEBUG oslo_concurrency.lockutils [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] Acquired lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.611795] env[68674]: DEBUG nova.network.neutron [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Refreshing network info cache for port 6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 744.625905] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7eae39f0-35f7-4675-becd-6efdb84b857f tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "6803af03-b1d5-47e6-9471-5213469e4103" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.651s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.807187] env[68674]: INFO nova.compute.manager [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Took 50.87 seconds to build instance. [ 744.807187] env[68674]: INFO nova.compute.manager [-] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Took 2.04 seconds to deallocate network for instance. [ 744.822042] env[68674]: DEBUG nova.network.neutron [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Successfully created port: 3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.864208] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a035b5-8474-c670-3a7a-16651c39d9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.011232} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.865428] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.867614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.867900] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1699f556-d451-40e3-a213-7edb753b03f1/1699f556-d451-40e3-a213-7edb753b03f1.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.868193] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd7774a3-2cb7-4af7-a58e-72c362a33220 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.879605] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 744.879605] env[68674]: value = "task-3239880" [ 744.879605] env[68674]: _type = "Task" [ 744.879605] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.887987] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239880, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.069111] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.069466] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e3e6f3f-5af0-4ea1-969b-f0c1de7ecd31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.077696] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 745.077696] env[68674]: value = "task-3239881" [ 745.077696] env[68674]: _type = "Task" [ 745.077696] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.087778] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239881, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.131613] env[68674]: DEBUG nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.310443] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47ddad09-e63c-4b63-83ee-3b23c915ecca tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.876s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.369254] env[68674]: INFO nova.compute.manager [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Took 0.56 seconds to detach 1 volumes for instance. [ 745.372690] env[68674]: DEBUG nova.compute.manager [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Deleting volume: 6312e471-ebb6-489c-a453-cfed8d42b5ac {{(pid=68674) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 745.403202] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239880, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.498855] env[68674]: DEBUG nova.network.neutron [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Updating instance_info_cache with network_info: [{"id": "52eba913-88d6-4c13-94bf-ad6cc7976b07", "address": "fa:16:3e:a8:a1:4e", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52eba913-88", "ovs_interfaceid": "52eba913-88d6-4c13-94bf-ad6cc7976b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.590300] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239881, 'name': PowerOffVM_Task, 'duration_secs': 0.266826} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.590544] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.591535] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.591913] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea571aa7-2629-424b-a4d5-47e6c6762ac6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.599101] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 745.599101] env[68674]: value = "task-3239883" [ 745.599101] env[68674]: _type = "Task" [ 745.599101] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.607113] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.616034] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41bd0e4d-aeea-4381-8173-90942767135d tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.621430] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.955s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.621717] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.005s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.624311] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.871s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.625932] env[68674]: INFO nova.compute.claims [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.634115] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 745.634295] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 745.634856] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647448', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'name': 'volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1189fa93-608b-4684-a675-f1caf29a9f43', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'serial': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 745.643279] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df85310f-77b7-4fb8-ad00-6534449ace85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.677625] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.677970] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.678128] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.678240] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.678406] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Image pref 0:0:0 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.678563] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.678776] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.678937] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.679125] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.679336] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.679537] env[68674]: DEBUG nova.virt.hardware [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.680591] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.681433] env[68674]: DEBUG nova.network.neutron [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Updated VIF entry in instance network info cache for port 6ef03ee8-7859-4976-be77-54e193e997a1. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 745.681904] env[68674]: DEBUG nova.network.neutron [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Updating instance_info_cache with network_info: [{"id": "6ef03ee8-7859-4976-be77-54e193e997a1", "address": "fa:16:3e:f8:aa:49", "network": {"id": "91238196-dc46-43a7-b28d-69c5b89ab844", "bridge": "br-int", "label": "tempest-ServersTestJSON-1435761967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43bb0fe5b1c4383b5089eb59db781ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ef03ee8-78", "ovs_interfaceid": "6ef03ee8-7859-4976-be77-54e193e997a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.687308] env[68674]: INFO nova.scheduler.client.report [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Deleted allocations for instance 367461db-8bc4-4cf0-b7f6-f79ee2bf8589 [ 745.691614] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e90e803-53d1-44a7-b9bc-8f82743920f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.696837] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc44918-57f4-4a3f-9771-981c57606e0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.715863] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f07c6c-4dc4-4291-a58d-833d2184906b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.721527] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253e802c-3cc5-427b-bab8-58029e173e33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.759020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442a7e62-c851-4e81-bd6b-d60b6c347a8d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.775450] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] The volume has not 
been displaced from its original location: [datastore1] volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0/volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 745.781200] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 745.781902] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7997947a-1052-40e8-bb24-b62034b3f344 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.805997] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 745.805997] env[68674]: value = "task-3239884" [ 745.805997] env[68674]: _type = "Task" [ 745.805997] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.814219] env[68674]: DEBUG nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.816679] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239884, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.894970] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.798946} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.895277] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1699f556-d451-40e3-a213-7edb753b03f1/1699f556-d451-40e3-a213-7edb753b03f1.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.895493] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.895741] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8dad6cce-76f0-4286-adbd-5fedb264b0c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.902968] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 745.902968] env[68674]: value = "task-3239885" [ 745.902968] env[68674]: _type = "Task" [ 745.902968] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.910888] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239885, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.951478] env[68674]: DEBUG nova.compute.manager [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Received event network-changed-52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 745.951478] env[68674]: DEBUG nova.compute.manager [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Refreshing instance network info cache due to event network-changed-52eba913-88d6-4c13-94bf-ad6cc7976b07. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 745.951478] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] Acquiring lock "refresh_cache-0e3c27fe-a2d9-45dc-9559-a678f90a6fef" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.959370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.002292] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "refresh_cache-0e3c27fe-a2d9-45dc-9559-a678f90a6fef" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.002728] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Instance network_info: |[{"id": "52eba913-88d6-4c13-94bf-ad6cc7976b07", "address": "fa:16:3e:a8:a1:4e", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52eba913-88", "ovs_interfaceid": "52eba913-88d6-4c13-94bf-ad6cc7976b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 746.003210] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] Acquired lock "refresh_cache-0e3c27fe-a2d9-45dc-9559-a678f90a6fef" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.003317] env[68674]: DEBUG nova.network.neutron [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Refreshing network info cache for port 52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.007491] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d 
tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:a1:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48512b02-ad5c-4105-ba7d-fd4775acf8e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52eba913-88d6-4c13-94bf-ad6cc7976b07', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.012772] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.015763] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 746.016268] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aca860b6-f0ca-4a33-8893-a46cba5bb673 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.040315] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.040315] env[68674]: value = "task-3239886" [ 746.040315] env[68674]: _type = "Task" [ 746.040315] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.049355] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239886, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.185838] env[68674]: DEBUG oslo_concurrency.lockutils [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] Releasing lock "refresh_cache-0eaf7d72-755b-4977-8f71-7d53ad1cf573" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.185838] env[68674]: DEBUG nova.compute.manager [req-d4fb4d18-11e3-42ad-ba71-676f5e61ab0e req-ba1c9609-5f74-44e3-96cd-0cd01349f15d service nova] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Received event network-vif-deleted-ad353ee1-e0ca-436b-b58e-eae548257eed {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.213856] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d418eace-cfba-4b40-8806-482dbab1f39d tempest-ServersListShow2100Test-2130531634 tempest-ServersListShow2100Test-2130531634-project-member] Lock "367461db-8bc4-4cf0-b7f6-f79ee2bf8589" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.924s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.320185] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239884, 'name': ReconfigVM_Task, 'duration_secs': 0.432052} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.322349] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 746.330923] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-989e534f-a425-4f3e-8ff3-715745ff17ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.344540] env[68674]: DEBUG nova.network.neutron [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Updated VIF entry in instance network info cache for port 52eba913-88d6-4c13-94bf-ad6cc7976b07. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.344787] env[68674]: DEBUG nova.network.neutron [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Updating instance_info_cache with network_info: [{"id": "52eba913-88d6-4c13-94bf-ad6cc7976b07", "address": "fa:16:3e:a8:a1:4e", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52eba913-88", "ovs_interfaceid": "52eba913-88d6-4c13-94bf-ad6cc7976b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.353298] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 746.353298] env[68674]: value = "task-3239887" [ 746.353298] env[68674]: _type = "Task" [ 746.353298] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.360689] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.370296] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239887, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.414462] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070395} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.414736] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 746.415548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e287670-70e0-44dd-947d-94fa59cb7937 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.440390] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 1699f556-d451-40e3-a213-7edb753b03f1/1699f556-d451-40e3-a213-7edb753b03f1.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.441513] env[68674]: DEBUG nova.network.neutron [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Successfully updated port: 3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.442713] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8295a2d-2328-41d7-ba30-bd207c29ee94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.464644] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 746.464644] env[68674]: value = "task-3239888" [ 746.464644] env[68674]: _type = "Task" [ 746.464644] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.474261] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239888, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.548845] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239886, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.850179] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc273cd7-ff5e-4723-8e9f-5e627f8c4ed2 req-eebc934a-fdf1-436c-bff5-62d91df90267 service nova] Releasing lock "refresh_cache-0e3c27fe-a2d9-45dc-9559-a678f90a6fef" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.860024] env[68674]: DEBUG nova.compute.manager [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received event network-vif-plugged-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.860135] env[68674]: DEBUG oslo_concurrency.lockutils [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] Acquiring lock "5e3f667c-5d3a-4465-9186-779563087480-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.860323] env[68674]: DEBUG oslo_concurrency.lockutils [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] Lock "5e3f667c-5d3a-4465-9186-779563087480-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.860522] env[68674]: DEBUG oslo_concurrency.lockutils [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] Lock "5e3f667c-5d3a-4465-9186-779563087480-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.860702] env[68674]: DEBUG nova.compute.manager [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] No waiting events found dispatching network-vif-plugged-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 746.860868] env[68674]: WARNING nova.compute.manager [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received unexpected event network-vif-plugged-3bad0946-10af-40d0-a8c2-a5469f09cf39 for instance with vm_state building and task_state spawning. 
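Annotation (not part of the captured log): the recurring "Waiting for the task: (returnval){ ... }" and "Task: {'id': task-XXXXXXX, ...} progress is N%." records above come from oslo.vmware's task-polling helpers (wait_for_task / _poll_task). The stand-alone Python sketch below only approximates that poll-until-done pattern; FakeTask, its hard-coded progress values, and poll_interval are illustrative stand-ins, not Nova's or oslo.vmware's actual code.

# Illustrative approximation of the poll loop behind the
# "Task: {'id': task-XXXXXXX, ...} progress is N%." records above.
# FakeTask and the numeric literals are hypothetical stand-ins; the real
# loop lives inside oslo.vmware's session/task handling.
import itertools
import time


class FakeTask:
    """Stands in for a vCenter task reference (e.g. ReconfigVM_Task)."""

    def __init__(self, name):
        self.name = name
        # Progress values loosely mimic the 0% / 33% / 99% samples in the log.
        self._progress = itertools.chain([0, 33, 99], itertools.repeat(100))

    def poll(self):
        """Return (state, progress) the way a driver would read them back."""
        progress = next(self._progress)
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, poll_interval=0.5):
    """Poll until the task finishes, printing progress like the records above."""
    while True:
        state, progress = task.poll()
        print(f"Task: {{'name': {task.name!r}}} progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"task {task.name} failed")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("CreateVM_Task"), poll_interval=0.01)

In the log the same pattern repeats for ReconfigVM_Task, ExtendVirtualDisk_Task, CreateVM_Task, Rename_Task and PowerOnVM_Task alike: the driver submits the vCenter task, then polls it until it reports completion or failure.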
[ 746.861052] env[68674]: DEBUG nova.compute.manager [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.861230] env[68674]: DEBUG nova.compute.manager [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing instance network info cache due to event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 746.861468] env[68674]: DEBUG oslo_concurrency.lockutils [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.861618] env[68674]: DEBUG oslo_concurrency.lockutils [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.861813] env[68674]: DEBUG nova.network.neutron [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.869019] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239887, 'name': ReconfigVM_Task, 'duration_secs': 0.211284} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.871976] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647448', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'name': 'volume-ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1189fa93-608b-4684-a675-f1caf29a9f43', 'attached_at': '', 'detached_at': '', 'volume_id': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0', 'serial': 'ad78e308-8afd-46bf-a8e5-5e31a5c091b0'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 746.872312] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.877047] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec37447-d62a-4859-9957-50f64357834f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.883633] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 746.886445] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6087989e-bd85-4e47-96e7-a32518f7cca9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.961884] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.971832] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.972113] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.972351] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 
tempest-ServerActionsV293TestJSON-1881910272-project-member] Deleting the datastore file [datastore1] 1189fa93-608b-4684-a675-f1caf29a9f43 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.974120] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3d98c72-6550-4d0a-88cc-0e9b18df2bb2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.980085] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239888, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.986770] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for the task: (returnval){ [ 746.986770] env[68674]: value = "task-3239890" [ 746.986770] env[68674]: _type = "Task" [ 746.986770] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.995678] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239890, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.058843] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239886, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.122224] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.122552] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.221210] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77986b82-f4ce-4d24-892f-0a47f6ab3b42 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.229463] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188c6870-372d-4ef7-ab3f-f16f4eb13ec7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.262493] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1016b9-ad2e-45dc-9b3b-b67153a38f14 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.271578] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045de5d2-b0ed-4bca-881f-f42ae1443538 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.284673] env[68674]: DEBUG nova.compute.provider_tree [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 747.394733] env[68674]: DEBUG nova.network.neutron [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.397762] env[68674]: DEBUG nova.compute.manager [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 747.398648] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbbf893-1107-4f24-9a4c-bfe95649d762 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.479348] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239888, 'name': ReconfigVM_Task, 'duration_secs': 0.62867} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.479783] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 1699f556-d451-40e3-a213-7edb753b03f1/1699f556-d451-40e3-a213-7edb753b03f1.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.482192] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f04492e-c258-4e70-b481-7cdb130cb272 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.492020] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 747.492020] env[68674]: value = "task-3239891" [ 747.492020] env[68674]: _type = "Task" [ 747.492020] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.500607] env[68674]: DEBUG nova.network.neutron [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.520580] env[68674]: DEBUG oslo_vmware.api [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Task: {'id': task-3239890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129144} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.523031] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239891, 'name': Rename_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.523031] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.523031] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 747.523031] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.558179] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239886, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.589322] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Volume detach. Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 747.589739] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ac584aa-ad3e-4a8a-a206-e5b95952e766 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.600197] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343938aa-c180-4166-9c59-d378d2bdece6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.637519] env[68674]: ERROR nova.compute.manager [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Failed to detach volume ad78e308-8afd-46bf-a8e5-5e31a5c091b0 from /dev/sda: nova.exception.InstanceNotFound: Instance 1189fa93-608b-4684-a675-f1caf29a9f43 could not be found. 
[ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Traceback (most recent call last): [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self.driver.rebuild(**kwargs) [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] raise NotImplementedError() [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] NotImplementedError [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] During handling of the above exception, another exception occurred: [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Traceback (most recent call last): [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 747.637519] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self.driver.detach_volume(context, old_connection_info, [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] return self._volumeops.detach_volume(connection_info, instance) [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._detach_volume_vmdk(connection_info, instance) [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] stable_ref.fetch_moref(session) [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] raise exception.InstanceNotFound(instance_id=self._uuid) [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] nova.exception.InstanceNotFound: 
Instance 1189fa93-608b-4684-a675-f1caf29a9f43 could not be found. [ 747.638100] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.796060] env[68674]: DEBUG nova.compute.utils [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Build of instance 1189fa93-608b-4684-a675-f1caf29a9f43 aborted: Failed to rebuild volume backed instance. {{(pid=68674) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 747.801511] env[68674]: ERROR nova.compute.manager [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 1189fa93-608b-4684-a675-f1caf29a9f43 aborted: Failed to rebuild volume backed instance. [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Traceback (most recent call last): [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self.driver.rebuild(**kwargs) [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] raise NotImplementedError() [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] NotImplementedError [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] During handling of the above exception, another exception occurred: [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Traceback (most recent call last): [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 747.801511] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._detach_root_volume(context, instance, root_bdm) [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] with excutils.save_and_reraise_exception(): [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self.force_reraise() [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] raise self.value [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self.driver.detach_volume(context, old_connection_info, [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] return self._volumeops.detach_volume(connection_info, instance) [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 747.802110] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._detach_volume_vmdk(connection_info, instance) [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] stable_ref.fetch_moref(session) [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] raise exception.InstanceNotFound(instance_id=self._uuid) [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] nova.exception.InstanceNotFound: Instance 1189fa93-608b-4684-a675-f1caf29a9f43 could not be found. 
[ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] During handling of the above exception, another exception occurred: [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Traceback (most recent call last): [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] yield [ 747.802572] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._do_rebuild_instance_with_claim( [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._do_rebuild_instance( [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._rebuild_default_impl(**kwargs) [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] self._rebuild_volume_backed_instance( [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] raise exception.BuildAbortException( [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] nova.exception.BuildAbortException: Build of instance 1189fa93-608b-4684-a675-f1caf29a9f43 aborted: Failed to rebuild volume backed instance. [ 747.803715] env[68674]: ERROR nova.compute.manager [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] [ 747.812501] env[68674]: ERROR nova.scheduler.client.report [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [req-e66e10e8-d8f8-49de-ad41-0fb6a9ebc603] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e66e10e8-d8f8-49de-ad41-0fb6a9ebc603"}]} [ 747.831446] env[68674]: DEBUG nova.scheduler.client.report [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 747.847138] env[68674]: DEBUG nova.scheduler.client.report [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 747.847821] env[68674]: DEBUG nova.compute.provider_tree [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 747.861055] env[68674]: DEBUG nova.scheduler.client.report [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 747.882066] env[68674]: DEBUG nova.scheduler.client.report [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 747.910244] env[68674]: INFO nova.compute.manager [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] instance snapshotting [ 747.913064] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9ff93b-c6e6-4820-89a0-25211def0a12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.947032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0621dea9-7437-4f0f-849a-cafdd5e8d2c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.002317] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239891, 'name': Rename_Task, 'duration_secs': 0.293645} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.006194] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.006875] env[68674]: DEBUG oslo_concurrency.lockutils [req-f528bee6-76db-4795-aac9-53793a8d3ea7 req-3346a9e6-112c-4cbc-85f2-771728990bd6 service nova] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.010830] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8e4313e-9b02-44b5-ab34-db5e8f998d83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.010830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.010830] env[68674]: DEBUG nova.network.neutron [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.017133] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 748.017133] env[68674]: value = "task-3239892" [ 748.017133] env[68674]: _type = "Task" [ 748.017133] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.035315] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239892, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.060620] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239886, 'name': CreateVM_Task, 'duration_secs': 1.639547} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.060798] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.061520] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.063659] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.063659] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 748.063659] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a876dc8-67e2-47ed-b42e-fb25cfeb8f21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.067871] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 748.067871] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cd485f-62aa-9ecf-3c9d-ced5c5f4fe80" [ 748.067871] env[68674]: _type = "Task" [ 748.067871] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.078151] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cd485f-62aa-9ecf-3c9d-ced5c5f4fe80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.429030] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b7730d-b207-49fd-924f-8e5ffba3ce9d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.437557] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079519a6-5234-4dd8-987a-51248f1d0097 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.469979] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 748.470443] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2975bd66-768b-4033-a045-775bca078260 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.472709] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa0c0f2-627d-4a8e-a724-042a40b57b73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.481737] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a5053b-8c4b-48a9-aacc-f2fbc01958c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.485580] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 748.485580] env[68674]: value = "task-3239893" [ 748.485580] env[68674]: _type = "Task" [ 748.485580] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.497466] env[68674]: DEBUG nova.compute.provider_tree [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 748.503732] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239893, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.528950] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239892, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.555072] env[68674]: DEBUG nova.network.neutron [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.580018] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cd485f-62aa-9ecf-3c9d-ced5c5f4fe80, 'name': SearchDatastore_Task, 'duration_secs': 0.043964} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.582299] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.582537] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.582763] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.582906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.583154] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.583418] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feed55bf-92b5-4ff5-8597-a79187e65fb4 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.591721] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.591887] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.592592] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d271ff39-17aa-48c2-8db1-2fe17005aa12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.598471] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 748.598471] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529779d8-da83-3c0e-dad4-c583e42a59d8" [ 748.598471] env[68674]: _type = "Task" [ 748.598471] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.607946] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529779d8-da83-3c0e-dad4-c583e42a59d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.815617] env[68674]: DEBUG nova.network.neutron [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.997078] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239893, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.038617] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239892, 'name': PowerOnVM_Task, 'duration_secs': 0.732425} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.038974] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.039280] env[68674]: INFO nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Took 9.29 seconds to spawn the instance on the hypervisor. 
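The records above show the driver's task-based workflow against vCenter: each operation (CreateVM_Task, PowerOnVM_Task, SearchDatastore_Task) returns a task handle that wait_for_task polls until it reports success, emitting the "progress is N%" DEBUG lines along the way. The sketch below is a minimal, generic illustration of that poll loop, not oslo.vmware's actual implementation; the get_task_info callable and the poll_interval/timeout parameters are assumptions introduced for the example.

    import time

    def wait_for_vc_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it reaches a terminal state.

        get_task_info is an assumed callable returning (state, progress, error),
        with state one of 'queued', 'running', 'success', 'error'.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = get_task_info()
            if state in ('queued', 'running'):
                # Corresponds to the "... progress is N%" DEBUG lines above.
                print("task progress is %s%%" % (progress or 0))
                time.sleep(poll_interval)
                continue
            if state == 'success':
                return
            # Failure path: the caller turns this into an instance fault or
            # BuildAbortException further up the stack.
            raise RuntimeError("task failed: %s" % error)
        raise TimeoutError("task did not complete within %ss" % timeout)

A real caller would obtain state and progress by re-reading the task object's info properties from vCenter on each iteration; here a stub such as lambda: ('success', 100, None) is enough to exercise the loop.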
[ 749.039565] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.042258] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5421a007-c880-4621-9f30-e8a47db43486 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.055181] env[68674]: DEBUG nova.scheduler.client.report [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 749.055514] env[68674]: DEBUG nova.compute.provider_tree [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 75 to 76 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 749.055768] env[68674]: DEBUG nova.compute.provider_tree [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 749.112627] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529779d8-da83-3c0e-dad4-c583e42a59d8, 'name': SearchDatastore_Task, 'duration_secs': 0.010933} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.114073] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bda027bd-f7ff-4946-b68c-2e72c6bcc90e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.121146] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 749.121146] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529f9996-5a45-47bc-0a11-f9434aabe1f9" [ 749.121146] env[68674]: _type = "Task" [ 749.121146] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.133888] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529f9996-5a45-47bc-0a11-f9434aabe1f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.318295] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.318607] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Instance network_info: |[{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 749.319053] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 
5e3f667c-5d3a-4465-9186-779563087480] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:b8:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bad0946-10af-40d0-a8c2-a5469f09cf39', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.328715] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Creating folder: Project (30b0da251a0d4f9c96f907b31ef9d5e0). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.329052] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ea2752e-e156-46f4-9c0a-0464ad1f0a12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.343263] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Created folder: Project (30b0da251a0d4f9c96f907b31ef9d5e0) in parent group-v647377. [ 749.343263] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Creating folder: Instances. Parent ref: group-v647519. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.343263] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9db8316d-3136-47ac-b562-180205eb993f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.353260] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Created folder: Instances in parent group-v647519. [ 749.353534] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.353736] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.353945] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5877ce9c-28d7-483f-9417-5e366bdaef8b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.382025] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.382025] env[68674]: value = "task-3239896" [ 749.382025] env[68674]: _type = "Task" [ 749.382025] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.390231] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239896, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.503455] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239893, 'name': CreateSnapshot_Task, 'duration_secs': 0.954039} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.503455] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 749.504136] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05be3256-c8f6-46e4-9bc1-0127d151e686 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.567274] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.943s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.567833] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.571827] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.773s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.572035] env[68674]: DEBUG nova.objects.instance [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 749.576484] env[68674]: INFO nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Took 51.86 seconds to build instance. 
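Earlier in this span the report client's inventory PUT failed with a 409 placement.concurrent_update error, after which it refreshed the provider's inventories, aggregates, and traits and then succeeded, bumping the provider generation from 75 to 76. The sketch below shows that refresh-and-retry pattern against the Placement HTTP API directly; the PLACEMENT endpoint, token, microversion header, and retry count are illustrative assumptions, not values taken from this deployment.

    import requests

    PLACEMENT = "http://placement.example/placement"   # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "ADMIN_TOKEN",                  # assumed credentials
        "OpenStack-API-Version": "placement 1.26",      # assumed microversion
    }

    def set_inventory(rp_uuid, inventories, retries=3):
        """PUT an inventory payload, retrying generation conflicts (HTTP 409,
        code 'placement.concurrent_update') after re-reading the provider."""
        for _ in range(retries):
            # Re-read the provider so the PUT carries its current generation;
            # a stale generation is exactly what produces the 409 above.
            rp = requests.get(
                f"{PLACEMENT}/resource_providers/{rp_uuid}", headers=HEADERS)
            rp.raise_for_status()
            body = {
                "resource_provider_generation": rp.json()["generation"],
                "inventories": inventories,
            }
            resp = requests.put(
                f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                headers=HEADERS, json=body)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # Another writer updated the provider between the GET and the PUT;
            # loop to pick up the new generation and try again.
        raise RuntimeError("inventory update still conflicting after retries")

The inventories argument takes the same shape as the VCPU/MEMORY_MB/DISK_GB dictionaries logged above.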
[ 749.635484] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529f9996-5a45-47bc-0a11-f9434aabe1f9, 'name': SearchDatastore_Task, 'duration_secs': 0.016449} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.635838] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.636229] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0e3c27fe-a2d9-45dc-9559-a678f90a6fef/0e3c27fe-a2d9-45dc-9559-a678f90a6fef.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.636600] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec4b7926-73eb-4292-baf5-396c3a60b9df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.645725] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 749.645725] env[68674]: value = "task-3239897" [ 749.645725] env[68674]: _type = "Task" [ 749.645725] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.656602] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239897, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.817162] env[68674]: DEBUG oslo_concurrency.lockutils [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.893222] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239896, 'name': CreateVM_Task, 'duration_secs': 0.448583} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.893404] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.894099] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.894292] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.894622] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.894887] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fb045bf-feba-4d3c-83e1-d392f83d864f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.902690] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 749.902690] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce24bd-06c4-7ebc-e845-b99a73669a4d" [ 749.902690] env[68674]: _type = "Task" [ 749.902690] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.912503] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce24bd-06c4-7ebc-e845-b99a73669a4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.032139] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 750.032497] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-52c818c2-02cd-4592-a778-c7477c800a49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.040742] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 750.040742] env[68674]: value = "task-3239898" [ 750.040742] env[68674]: _type = "Task" [ 750.040742] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.051754] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239898, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.082651] env[68674]: DEBUG nova.compute.utils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.084027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "1699f556-d451-40e3-a213-7edb753b03f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.424s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.085067] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 750.085067] env[68674]: DEBUG nova.network.neutron [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.142807] env[68674]: DEBUG nova.policy [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02bee05e80de43c3bdde6511a583b3d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40404e0b9c0042c58bc22c96799709af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 750.158669] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239897, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.416332] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce24bd-06c4-7ebc-e845-b99a73669a4d, 'name': SearchDatastore_Task, 'duration_secs': 0.023968} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.416630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.417284] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.417284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.417446] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.417552] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.417830] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a04fc3b-9bb4-4d21-9020-df69c5f1496b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.431607] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.431811] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 750.432734] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba008f2f-ef2b-49f3-af2d-e08f08b73b21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.440621] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 750.440621] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52169368-d219-99ed-4ce2-141743525401" [ 750.440621] env[68674]: _type = "Task" [ 750.440621] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.451543] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52169368-d219-99ed-4ce2-141743525401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.554356] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239898, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.586497] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b79c256a-0c3f-46dc-b80e-7fa2b46b54b8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.587664] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.039s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.589384] env[68674]: INFO nova.compute.claims [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.595843] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.598534] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.664173] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774214} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.664471] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0e3c27fe-a2d9-45dc-9559-a678f90a6fef/0e3c27fe-a2d9-45dc-9559-a678f90a6fef.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.664690] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.664947] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-154defcb-c8d5-4f08-86fe-0ed8a70b0f85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.673099] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 750.673099] env[68674]: value = "task-3239899" [ 750.673099] env[68674]: _type = "Task" [ 750.673099] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.674518] env[68674]: DEBUG nova.network.neutron [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Successfully created port: 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.685071] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239899, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.858044] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "1189fa93-608b-4684-a675-f1caf29a9f43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.858366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "1189fa93-608b-4684-a675-f1caf29a9f43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.859033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "1189fa93-608b-4684-a675-f1caf29a9f43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.859243] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "1189fa93-608b-4684-a675-f1caf29a9f43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.859419] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "1189fa93-608b-4684-a675-f1caf29a9f43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.862217] env[68674]: INFO nova.compute.manager [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Terminating instance [ 750.951838] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52169368-d219-99ed-4ce2-141743525401, 'name': SearchDatastore_Task, 'duration_secs': 0.019321} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.952632] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b7a5355-9333-42bf-ba37-93629062a4d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.958173] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 750.958173] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526aba4a-0265-e6b3-ef9f-992114ac2461" [ 750.958173] env[68674]: _type = "Task" [ 750.958173] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.966151] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526aba4a-0265-e6b3-ef9f-992114ac2461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.052170] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239898, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.141662] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.187153] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068898} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.187479] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.188366] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb9f96f-9491-4ec5-b786-c6295cd39f24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.211609] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 0e3c27fe-a2d9-45dc-9559-a678f90a6fef/0e3c27fe-a2d9-45dc-9559-a678f90a6fef.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.211979] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbc106a0-eb83-4314-9bcd-d80bdd65d4f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.234689] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 751.234689] env[68674]: value = "task-3239900" [ 751.234689] env[68674]: _type = "Task" [ 751.234689] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.243724] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239900, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.367531] env[68674]: DEBUG nova.compute.manager [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 751.367952] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e10a8c24-4369-4e6e-9f9d-18d55b83aaa0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.383019] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966f5d55-e155-4d46-885f-0f3f992ee374 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.418149] env[68674]: WARNING nova.virt.vmwareapi.driver [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 1189fa93-608b-4684-a675-f1caf29a9f43 could not be found. [ 751.418322] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.418705] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b06f8112-f8f0-4b35-ad93-0947b582402a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.428182] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec8c489-42a0-4b50-819a-6e974ec7302f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.463268] env[68674]: WARNING nova.virt.vmwareapi.vmops [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1189fa93-608b-4684-a675-f1caf29a9f43 could not be found. [ 751.463268] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.463268] env[68674]: INFO nova.compute.manager [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Took 0.10 seconds to destroy the instance on the hypervisor. [ 751.463268] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.467072] env[68674]: DEBUG nova.compute.manager [-] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.467225] env[68674]: DEBUG nova.network.neutron [-] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.475084] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526aba4a-0265-e6b3-ef9f-992114ac2461, 'name': SearchDatastore_Task, 'duration_secs': 0.034482} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.475325] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.475596] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 5e3f667c-5d3a-4465-9186-779563087480/5e3f667c-5d3a-4465-9186-779563087480.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 751.475857] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3c4c125-43fa-4feb-ac55-8e55fc002369 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.482358] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 751.482358] env[68674]: value = "task-3239901" [ 751.482358] env[68674]: _type = "Task" [ 751.482358] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.490683] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239901, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.553534] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239898, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.616720] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.662840] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.663154] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.663309] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.663506] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.663674] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.663834] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.664076] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.664267] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.664445] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.664635] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.664821] env[68674]: DEBUG nova.virt.hardware [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.665764] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bad873-e97d-48f5-a33f-efeb206df6cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.678179] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f239ad-2dc4-454f-8a18-0a77dea0886a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.747490] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239900, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.917496] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.917870] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.994736] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239901, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.056996] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239898, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.225890] env[68674]: DEBUG nova.compute.manager [req-863e87a8-fda1-47e5-8c7c-f23fc8ce8a57 req-66bec300-1bee-4c36-921f-c49e799106d0 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Received event network-vif-deleted-f8fd3dc4-58cc-4298-8fe7-96a500eacace {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.226114] env[68674]: INFO nova.compute.manager [req-863e87a8-fda1-47e5-8c7c-f23fc8ce8a57 req-66bec300-1bee-4c36-921f-c49e799106d0 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Neutron deleted interface f8fd3dc4-58cc-4298-8fe7-96a500eacace; detaching it from the instance and deleting it from the info cache [ 752.226305] env[68674]: DEBUG nova.network.neutron [req-863e87a8-fda1-47e5-8c7c-f23fc8ce8a57 req-66bec300-1bee-4c36-921f-c49e799106d0 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.250777] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239900, 'name': ReconfigVM_Task, 'duration_secs': 0.545736} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.250777] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 0e3c27fe-a2d9-45dc-9559-a678f90a6fef/0e3c27fe-a2d9-45dc-9559-a678f90a6fef.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.251478] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f959072c-aec0-450b-8de2-d45ef1d860a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.259466] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 752.259466] env[68674]: value = "task-3239902" [ 752.259466] env[68674]: _type = "Task" [ 752.259466] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.274774] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239902, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.342121] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb59744a-d033-4b5b-a89e-8c6f8b02d947 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.352301] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6737716a-584e-4bc6-8fe0-ae8e994b0722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.389774] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72555785-ed16-4016-83d7-54c9b961363e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.397897] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57c5a79-7d26-4fbd-b208-b26b0ee4d003 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.412336] env[68674]: DEBUG nova.compute.provider_tree [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.497360] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239901, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.500604] env[68674]: DEBUG nova.compute.manager [req-02f4ac1f-4715-44c5-bf97-0eea7fcde39b req-35d4b5b3-5059-4518-8a44-13fc97b56f7e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-vif-plugged-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.500820] env[68674]: DEBUG oslo_concurrency.lockutils [req-02f4ac1f-4715-44c5-bf97-0eea7fcde39b req-35d4b5b3-5059-4518-8a44-13fc97b56f7e service nova] Acquiring lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.501052] env[68674]: DEBUG oslo_concurrency.lockutils [req-02f4ac1f-4715-44c5-bf97-0eea7fcde39b req-35d4b5b3-5059-4518-8a44-13fc97b56f7e service nova] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.501207] env[68674]: DEBUG oslo_concurrency.lockutils [req-02f4ac1f-4715-44c5-bf97-0eea7fcde39b req-35d4b5b3-5059-4518-8a44-13fc97b56f7e service nova] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.501569] env[68674]: DEBUG nova.compute.manager [req-02f4ac1f-4715-44c5-bf97-0eea7fcde39b req-35d4b5b3-5059-4518-8a44-13fc97b56f7e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] No waiting events found dispatching network-vif-plugged-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 752.501569] env[68674]: WARNING nova.compute.manager [req-02f4ac1f-4715-44c5-bf97-0eea7fcde39b req-35d4b5b3-5059-4518-8a44-13fc97b56f7e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received unexpected event network-vif-plugged-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 for instance with vm_state building and task_state spawning. [ 752.555328] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239898, 'name': CloneVM_Task, 'duration_secs': 2.133778} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.555522] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Created linked-clone VM from snapshot [ 752.556263] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d356a3b5-d169-406b-9b16-7a12158ea321 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.563678] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Uploading image c5ef204e-070f-42a2-912e-3be2b0141452 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 752.592074] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 752.592074] env[68674]: value = "vm-647523" [ 752.592074] env[68674]: _type = "VirtualMachine" [ 752.592074] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 752.592380] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1a5d27f-1a8c-4f87-95a2-f88940946a31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.599061] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lease: (returnval){ [ 752.599061] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed268e-a420-e532-d1e7-b762879aea3e" [ 752.599061] env[68674]: _type = "HttpNfcLease" [ 752.599061] env[68674]: } obtained for exporting VM: (result){ [ 752.599061] env[68674]: value = "vm-647523" [ 752.599061] env[68674]: _type = "VirtualMachine" [ 752.599061] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 752.599305] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the lease: (returnval){ [ 752.599305] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed268e-a420-e532-d1e7-b762879aea3e" [ 752.599305] env[68674]: _type = "HttpNfcLease" [ 752.599305] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 752.605605] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 752.605605] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed268e-a420-e532-d1e7-b762879aea3e" [ 752.605605] env[68674]: _type = "HttpNfcLease" [ 752.605605] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 752.671576] env[68674]: DEBUG nova.network.neutron [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Successfully updated port: 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 752.686636] env[68674]: DEBUG nova.network.neutron [-] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.728912] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f2ef972-5661-42c4-81e3-6eaea1991df9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.739437] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1e0798-4de2-47b1-a182-72af47ad022e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.779937] env[68674]: DEBUG nova.compute.manager [req-863e87a8-fda1-47e5-8c7c-f23fc8ce8a57 req-66bec300-1bee-4c36-921f-c49e799106d0 service nova] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Detach interface failed, port_id=f8fd3dc4-58cc-4298-8fe7-96a500eacace, reason: Instance 1189fa93-608b-4684-a675-f1caf29a9f43 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 752.788580] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239902, 'name': Rename_Task, 'duration_secs': 0.315054} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.788580] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.788874] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93c41cf4-df3c-4041-b541-db38426db8eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.795890] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 752.795890] env[68674]: value = "task-3239904" [ 752.795890] env[68674]: _type = "Task" [ 752.795890] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.804021] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239904, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.918418] env[68674]: DEBUG nova.scheduler.client.report [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 752.994593] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239901, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.109414] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 753.109414] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed268e-a420-e532-d1e7-b762879aea3e" [ 753.109414] env[68674]: _type = "HttpNfcLease" [ 753.109414] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 753.109414] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 753.109414] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ed268e-a420-e532-d1e7-b762879aea3e" [ 753.109414] env[68674]: _type = "HttpNfcLease" [ 753.109414] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 753.109819] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0e2704-f6f6-4050-bf69-0f234f3bb3fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.116905] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526e0487-99b3-2c26-2a53-a215cfa1185e/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 753.117202] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526e0487-99b3-2c26-2a53-a215cfa1185e/disk-0.vmdk for reading. 
{{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 753.175094] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.175240] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.175402] env[68674]: DEBUG nova.network.neutron [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.189842] env[68674]: INFO nova.compute.manager [-] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Took 1.72 seconds to deallocate network for instance. [ 753.288496] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d21eda50-877f-4b63-9f60-b58e77e76c2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.309156] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239904, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.424178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.424928] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 753.427729] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.778s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.429223] env[68674]: INFO nova.compute.claims [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.656725] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239901, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.613657} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.656725] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 5e3f667c-5d3a-4465-9186-779563087480/5e3f667c-5d3a-4465-9186-779563087480.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.656725] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.656725] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11c65d5f-de46-4ed3-9883-4a8869cc356e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.667844] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 753.667844] env[68674]: value = "task-3239905" [ 753.667844] env[68674]: _type = "Task" [ 753.667844] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.687141] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239905, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.773894] env[68674]: DEBUG nova.network.neutron [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.787535] env[68674]: INFO nova.compute.manager [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Took 0.60 seconds to detach 1 volumes for instance. [ 753.791752] env[68674]: DEBUG nova.compute.manager [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Deleting volume: ad78e308-8afd-46bf-a8e5-5e31a5c091b0 {{(pid=68674) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 753.806988] env[68674]: DEBUG oslo_vmware.api [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239904, 'name': PowerOnVM_Task, 'duration_secs': 0.644201} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.807680] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.808068] env[68674]: INFO nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Took 10.91 seconds to spawn the instance on the hypervisor. [ 753.808839] env[68674]: DEBUG nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.809852] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b23b09-2c8e-4d10-b784-58a49ae0071d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.933722] env[68674]: DEBUG nova.compute.utils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 753.948500] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 753.949886] env[68674]: DEBUG nova.network.neutron [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 754.019981] env[68674]: DEBUG nova.policy [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81885c21c53d4bdab5a2ef8a411c1a45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '741fafb6661f409fa15f55661286b21e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 754.183062] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07234} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.183062] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.183637] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5f9e13-5fc8-4c4f-baa1-acf9b3a3137b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.229937] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 5e3f667c-5d3a-4465-9186-779563087480/5e3f667c-5d3a-4465-9186-779563087480.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.231697] env[68674]: DEBUG nova.network.neutron [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.233746] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bdf1317-8d27-4e7f-acd4-6cce6d0aed37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.254352] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.254800] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Instance network_info: |[{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 754.255543] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:e7:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604056d6-6dd6-47fa-9eaa-6863a3a7c488', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'71dd9ef4-7bf9-4f8c-a04c-23431a1d8112', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 754.266617] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Creating folder: Project (40404e0b9c0042c58bc22c96799709af). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 754.268369] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49ecd172-a91b-4a3c-829f-71d4be9a865b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.273712] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 754.273712] env[68674]: value = "task-3239907" [ 754.273712] env[68674]: _type = "Task" [ 754.273712] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.288274] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Created folder: Project (40404e0b9c0042c58bc22c96799709af) in parent group-v647377. [ 754.288274] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Creating folder: Instances. Parent ref: group-v647524. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 754.288274] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239907, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.288274] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67648bee-d83b-464c-9b0d-f5d9694c59dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.298900] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Created folder: Instances in parent group-v647524. [ 754.299492] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 754.299601] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 754.299898] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10664e23-60c0-4009-b045-3e6b735ce4f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.321862] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 754.321862] env[68674]: value = "task-3239910" [ 754.321862] env[68674]: _type = "Task" [ 754.321862] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.334138] env[68674]: INFO nova.compute.manager [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Took 55.11 seconds to build instance. [ 754.344476] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239910, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.387688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.426361] env[68674]: DEBUG nova.network.neutron [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Successfully created port: 213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.442880] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 754.787051] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239907, 'name': ReconfigVM_Task, 'duration_secs': 0.483576} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.787357] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 5e3f667c-5d3a-4465-9186-779563087480/5e3f667c-5d3a-4465-9186-779563087480.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.788009] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a577bbee-e460-4ffb-9e89-95012849ce39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.794466] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 754.794466] env[68674]: value = "task-3239911" [ 754.794466] env[68674]: _type = "Task" [ 754.794466] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.806633] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239911, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.833557] env[68674]: DEBUG nova.compute.manager [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.833793] env[68674]: DEBUG nova.compute.manager [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing instance network info cache due to event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 754.834589] env[68674]: DEBUG oslo_concurrency.lockutils [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.834589] env[68674]: DEBUG oslo_concurrency.lockutils [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.834589] env[68674]: DEBUG nova.network.neutron [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.842112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1ec408e8-bae4-43e4-80d0-bd68e24f8a2d tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.144s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.842476] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239910, 'name': CreateVM_Task, 'duration_secs': 0.442144} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.847025] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.852753] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.852753] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.852753] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 754.853720] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b920ee7-2dc4-4ab4-af2a-1f44f933416f {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.862347] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 754.862347] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fe32d-bf95-eed0-d028-544c8d87406d" [ 754.862347] env[68674]: _type = "Task" [ 754.862347] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.871506] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fe32d-bf95-eed0-d028-544c8d87406d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.131874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.132102] env[68674]: DEBUG oslo_concurrency.lockutils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.147372] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1920de-244e-4b61-8980-a2b16154aa0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.156437] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7fc92a-387b-46e6-8fa3-7b9b8b97831a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.189020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2d4b7d-da77-4279-a055-97cdb6f52b4f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.200709] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2044506-eb3f-4d85-8062-5986d0841c6d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.216079] env[68674]: DEBUG nova.compute.provider_tree [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.306464] env[68674]: DEBUG oslo_vmware.api [None 
req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239911, 'name': Rename_Task, 'duration_secs': 0.22041} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.306881] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.307149] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff123cac-66d0-4827-a505-071ee725d828 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.313911] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 755.313911] env[68674]: value = "task-3239912" [ 755.313911] env[68674]: _type = "Task" [ 755.313911] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.322216] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.352032] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.373512] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fe32d-bf95-eed0-d028-544c8d87406d, 'name': SearchDatastore_Task, 'duration_secs': 0.013324} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.373828] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.374102] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.374353] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.374500] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.374693] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.374978] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37b72f0a-2072-4708-9200-042e3ef55079 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.384283] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.384472] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 755.385218] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcfc87a8-3339-47b4-98ae-3f28283431f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.390642] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 755.390642] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d38815-fcd5-1479-9968-5118c9e475c3" [ 755.390642] env[68674]: _type = "Task" [ 755.390642] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.401807] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d38815-fcd5-1479-9968-5118c9e475c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.455143] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 755.462426] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "1699f556-d451-40e3-a213-7edb753b03f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.462793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "1699f556-d451-40e3-a213-7edb753b03f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.463133] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "1699f556-d451-40e3-a213-7edb753b03f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.465510] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "1699f556-d451-40e3-a213-7edb753b03f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.465510] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "1699f556-d451-40e3-a213-7edb753b03f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.468922] env[68674]: INFO nova.compute.manager [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Terminating instance [ 755.489424] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 755.489424] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.489424] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 755.489679] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.489958] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 755.490923] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 755.491315] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 755.491705] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 755.491967] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 755.492226] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 755.492537] env[68674]: DEBUG nova.virt.hardware [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 755.493577] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20dc35c4-387e-4a73-a24d-7f23a323fd60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.504869] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2945a779-f367-4cbc-b62a-50411bc7f991 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.613104] env[68674]: DEBUG nova.network.neutron [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updated VIF entry in instance network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.613481] env[68674]: DEBUG nova.network.neutron [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.638334] env[68674]: DEBUG nova.compute.utils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 755.700943] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.701300] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.701503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.701705] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.701882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.704345] env[68674]: INFO nova.compute.manager [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Terminating instance [ 755.718859] env[68674]: DEBUG nova.scheduler.client.report [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.827992] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239912, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.882878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.900769] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d38815-fcd5-1479-9968-5118c9e475c3, 'name': SearchDatastore_Task, 'duration_secs': 0.015061} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.901643] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b845aaa5-2429-4664-a4eb-78f734c5be2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.908747] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 755.908747] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5295b857-7e6c-65f7-2916-d6ce24a3e6a9" [ 755.908747] env[68674]: _type = "Task" [ 755.908747] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.917173] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5295b857-7e6c-65f7-2916-d6ce24a3e6a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.978123] env[68674]: DEBUG nova.compute.manager [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 755.978701] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.979786] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6711aeba-f0b1-48fb-b670-b5a250c98129 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.991598] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.991992] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b29f7af2-fa9b-406b-8300-9c60f16dd17c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.004219] env[68674]: DEBUG oslo_vmware.api [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 756.004219] env[68674]: value = "task-3239913" [ 756.004219] env[68674]: _type = "Task" [ 756.004219] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.016660] env[68674]: DEBUG oslo_vmware.api [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239913, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.116838] env[68674]: DEBUG oslo_concurrency.lockutils [req-f38e8fe3-ab64-4d7d-93c7-88a5a755db24 req-e1616b32-adf1-4422-b01c-aa99ff5f162e service nova] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.141192] env[68674]: DEBUG oslo_concurrency.lockutils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.208455] env[68674]: DEBUG nova.compute.manager [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 756.208766] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.210610] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9a4f19-2abb-432a-bdaa-77c009804ec3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.219242] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.219645] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec382459-5731-4c0d-9718-72213f8db89b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.224084] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.796s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.224813] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 
tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 756.229051] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.773s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.229177] env[68674]: DEBUG nova.objects.instance [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 756.233218] env[68674]: DEBUG oslo_vmware.api [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 756.233218] env[68674]: value = "task-3239914" [ 756.233218] env[68674]: _type = "Task" [ 756.233218] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.245467] env[68674]: DEBUG oslo_vmware.api [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.254174] env[68674]: DEBUG nova.network.neutron [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Successfully updated port: 213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 756.326490] env[68674]: DEBUG oslo_vmware.api [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3239912, 'name': PowerOnVM_Task, 'duration_secs': 0.714521} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.326943] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.327359] env[68674]: INFO nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Took 10.72 seconds to spawn the instance on the hypervisor. 
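The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver invokes a vSphere *_Task method through the API session, receives a task reference, and then blocks in wait_for_task() while the poller emits the "progress is N%" lines until the task completes. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession; the helper name and the placeholder VM reference are illustrative, not Nova's actual code path:

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        """Issue PowerOnVM_Task for vm_ref and block until it finishes."""
        # invoke_api() sends the SOAP request -- the "Invoking
        # VirtualMachine.PowerOnVM_Task with opID=..." entries -- and
        # returns a task managed-object reference.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task -- the "Task: {'id': ..., 'name':
        # PowerOnVM_Task} progress is N%" entries -- and raises if the
        # task ends in an error state.
        return session.wait_for_task(task_ref)

    # session = vmware_api.VMwareAPISession(...)  # host/credentials omitted here
    # power_on(session, vm_ref)                   # vm_ref obtained from a prior lookup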
[ 756.327683] env[68674]: DEBUG nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.328650] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77233991-f50f-4adb-acfe-63f1d106f6e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.419257] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5295b857-7e6c-65f7-2916-d6ce24a3e6a9, 'name': SearchDatastore_Task, 'duration_secs': 0.02134} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.419595] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.419817] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/2007222e-e4e5-44b3-bd9e-55b4a2143c3e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.420094] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f9812c7-10b2-4f2a-b3f4-afee5421398f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.427325] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 756.427325] env[68674]: value = "task-3239915" [ 756.427325] env[68674]: _type = "Task" [ 756.427325] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.435676] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239915, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.514563] env[68674]: DEBUG oslo_vmware.api [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239913, 'name': PowerOffVM_Task, 'duration_secs': 0.291311} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.514843] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.515053] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.515273] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b2de790-ccbd-4556-b713-d7f2a8e9a3a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.715462] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.715718] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.715911] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleting the datastore file [datastore2] 1699f556-d451-40e3-a213-7edb753b03f1 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.717520] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2afaca1f-6f28-4508-a184-9b38a6f29638 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.725895] env[68674]: DEBUG oslo_vmware.api [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 756.725895] env[68674]: value = "task-3239917" [ 756.725895] env[68674]: _type = "Task" [ 756.725895] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.735393] env[68674]: DEBUG nova.compute.manager [req-14526ec1-dec7-4d4e-8c77-886f73f46292 req-6fb5a4ed-e6ee-445e-9dc1-7f5c0e2faec3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Received event network-vif-plugged-213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.735393] env[68674]: DEBUG oslo_concurrency.lockutils [req-14526ec1-dec7-4d4e-8c77-886f73f46292 req-6fb5a4ed-e6ee-445e-9dc1-7f5c0e2faec3 service nova] Acquiring lock "714142ec-89ad-44ab-8543-11493172a50b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.737593] env[68674]: DEBUG oslo_concurrency.lockutils [req-14526ec1-dec7-4d4e-8c77-886f73f46292 req-6fb5a4ed-e6ee-445e-9dc1-7f5c0e2faec3 service nova] Lock "714142ec-89ad-44ab-8543-11493172a50b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.737978] env[68674]: DEBUG oslo_concurrency.lockutils [req-14526ec1-dec7-4d4e-8c77-886f73f46292 req-6fb5a4ed-e6ee-445e-9dc1-7f5c0e2faec3 service nova] Lock "714142ec-89ad-44ab-8543-11493172a50b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.738306] env[68674]: DEBUG nova.compute.manager [req-14526ec1-dec7-4d4e-8c77-886f73f46292 req-6fb5a4ed-e6ee-445e-9dc1-7f5c0e2faec3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] No waiting events found dispatching network-vif-plugged-213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 756.738616] env[68674]: WARNING nova.compute.manager [req-14526ec1-dec7-4d4e-8c77-886f73f46292 req-6fb5a4ed-e6ee-445e-9dc1-7f5c0e2faec3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Received unexpected event network-vif-plugged-213a8151-04de-4dee-8d0e-06db0fbd89aa for instance with vm_state building and task_state spawning. [ 756.745781] env[68674]: DEBUG nova.compute.utils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 756.751483] env[68674]: DEBUG oslo_vmware.api [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239917, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.752436] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 756.752748] env[68674]: DEBUG nova.network.neutron [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.760706] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.761361] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquired lock "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.761361] env[68674]: DEBUG nova.network.neutron [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.771679] env[68674]: DEBUG oslo_vmware.api [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239914, 'name': PowerOffVM_Task, 'duration_secs': 0.238502} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.771982] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.772481] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.773088] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e54dbbce-9dfa-4cab-b499-85e366d3e9b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.826154] env[68674]: DEBUG nova.policy [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcdb66599bea45219bbf9401434e9024', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5be31196e1f452e8768b57c105d1765', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 756.849512] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.849922] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.850032] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleting the datastore file [datastore1] 0e3c27fe-a2d9-45dc-9559-a678f90a6fef {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.856179] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7eb785b2-d586-48d0-9e43-c411b46cae38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.859520] env[68674]: INFO nova.compute.manager [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Took 49.90 seconds to build instance. 
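The Acquiring/acquired/released lock entries that recur throughout this section are emitted by oslo.concurrency's lockutils helpers: the "acquired ... :: waited Ns" / "released ... :: held Ns" pairs come from the synchronized decorator's wrapper (the inner frames in lockutils.py), while the plain Acquiring/Acquired/Releasing triplets around the "refresh_cache-<uuid>" name come from the lock() context manager. A minimal sketch of both forms, assuming oslo.concurrency is installed; apart from the cache-lock name quoted from the log, the names and bodies are illustrative:

    from oslo_concurrency import lockutils

    # Decorator form: logs 'Acquiring lock "compute_resources" by ...',
    # then 'Lock "compute_resources" acquired ... :: waited Ns' on entry
    # and '... "released" ... :: held Ns' on exit.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # critical section guarded by the named lock

    # Context-manager form: produces the Acquiring/Acquired/Releasing
    # entries seen for the instance network-info cache lock above.
    with lockutils.lock('refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e'):
        pass  # refresh the cached network info while holding the lock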
[ 756.866758] env[68674]: DEBUG oslo_vmware.api [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 756.866758] env[68674]: value = "task-3239919" [ 756.866758] env[68674]: _type = "Task" [ 756.866758] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.876260] env[68674]: DEBUG oslo_vmware.api [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.939012] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239915, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.207398] env[68674]: DEBUG nova.network.neutron [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Successfully created port: fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.217484] env[68674]: DEBUG oslo_concurrency.lockutils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.217717] env[68674]: DEBUG oslo_concurrency.lockutils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.218025] env[68674]: INFO nova.compute.manager [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Attaching volume d79cd12d-5be5-4762-bb3d-a4b82e0afd67 to /dev/sdb [ 757.236415] env[68674]: DEBUG oslo_vmware.api [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343979} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.236714] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.236896] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.237137] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.237351] env[68674]: INFO nova.compute.manager [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Took 1.26 seconds to destroy the instance on the hypervisor. [ 757.237596] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.237783] env[68674]: DEBUG nova.compute.manager [-] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.237942] env[68674]: DEBUG nova.network.neutron [-] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.253059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e0ad6c0-e861-4317-95c2-e4c07f8a659b tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.255404] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 757.256746] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.865s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.256966] env[68674]: DEBUG nova.objects.instance [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lazy-loading 'resources' on Instance uuid 505b0352-39ab-4841-8766-14626af2b13e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 757.260300] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec3584a-26c9-482b-b4b2-056d20eb898e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.274780] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3695fb87-2547-4eac-9b7f-a48972d29144 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.288732] env[68674]: DEBUG nova.virt.block_device [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updating existing volume attachment record: 3534874a-7ffe-485d-a494-3906e273d637 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 757.330533] env[68674]: DEBUG nova.network.neutron [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.364024] env[68674]: DEBUG oslo_concurrency.lockutils [None req-081090ae-c75a-46b1-b69e-e987e3c2baa7 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "5e3f667c-5d3a-4465-9186-779563087480" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.822s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.386040] env[68674]: DEBUG oslo_vmware.api [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3239919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181614} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.386040] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.387057] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.387057] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.387057] env[68674]: INFO nova.compute.manager [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Took 1.18 seconds to destroy the instance on the hypervisor. [ 757.387320] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.387481] env[68674]: DEBUG nova.compute.manager [-] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.387910] env[68674]: DEBUG nova.network.neutron [-] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.439426] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513899} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.439861] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/2007222e-e4e5-44b3-bd9e-55b4a2143c3e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.440266] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.440675] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-118d1e2d-be60-4066-b9d9-94e64cde0e63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.449194] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 757.449194] env[68674]: value = "task-3239922" [ 757.449194] env[68674]: _type = "Task" [ 757.449194] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.460968] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239922, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.866989] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.946083] env[68674]: DEBUG nova.compute.manager [req-3b99d62c-d63d-4e16-8bd4-02f2c38f5410 req-40d1fca7-2953-477b-9cba-d4e448ddd783 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Received event network-vif-deleted-8ccc7517-d8c1-44f5-a6ef-52fd819f4904 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.946083] env[68674]: INFO nova.compute.manager [req-3b99d62c-d63d-4e16-8bd4-02f2c38f5410 req-40d1fca7-2953-477b-9cba-d4e448ddd783 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Neutron deleted interface 8ccc7517-d8c1-44f5-a6ef-52fd819f4904; detaching it from the instance and deleting it from the info cache [ 757.946083] env[68674]: DEBUG nova.network.neutron [req-3b99d62c-d63d-4e16-8bd4-02f2c38f5410 req-40d1fca7-2953-477b-9cba-d4e448ddd783 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.955153] env[68674]: DEBUG nova.network.neutron [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Updating instance_info_cache with network_info: [{"id": "213a8151-04de-4dee-8d0e-06db0fbd89aa", "address": "fa:16:3e:21:1f:03", "network": {"id": "0232e7d9-d9e3-4ac3-b5b4-79579c4d44ae", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-129129078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "741fafb6661f409fa15f55661286b21e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213a8151-04", "ovs_interfaceid": "213a8151-04de-4dee-8d0e-06db0fbd89aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.963667] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239922, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073914} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.965365] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 757.968890] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebed933c-d48d-46ae-bb56-947097712961 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.994132] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/2007222e-e4e5-44b3-bd9e-55b4a2143c3e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.997340] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d932fa0a-d2e3-4161-8c3a-e34770b8b08f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.022917] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 758.022917] env[68674]: value = "task-3239924" [ 758.022917] env[68674]: _type = "Task" [ 758.022917] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.033301] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239924, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.229721] env[68674]: DEBUG nova.network.neutron [-] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.274379] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 758.297690] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 758.298054] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.298142] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 758.298334] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.298482] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 758.298632] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 758.298840] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 758.298999] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 758.299192] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 758.299443] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 758.299645] env[68674]: DEBUG nova.virt.hardware [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 758.300524] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33091b93-33f6-4f53-9efb-3fb697432a4a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.313530] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595cc32d-fce4-4a57-801d-fbecc20283b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.387779] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.430859] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bad2319-2e09-41c4-80d9-3a9118f014f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.439532] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc1a51d-437d-42b1-9649-ea24b99c318f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.446618] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e24fb29d-adee-4533-a1b4-cf4d97726486 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.473976] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Releasing lock "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.474339] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 
714142ec-89ad-44ab-8543-11493172a50b] Instance network_info: |[{"id": "213a8151-04de-4dee-8d0e-06db0fbd89aa", "address": "fa:16:3e:21:1f:03", "network": {"id": "0232e7d9-d9e3-4ac3-b5b4-79579c4d44ae", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-129129078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "741fafb6661f409fa15f55661286b21e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213a8151-04", "ovs_interfaceid": "213a8151-04de-4dee-8d0e-06db0fbd89aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 758.475887] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:1f:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '683a619f-b10d-41a3-8c03-4f69f6c9ce53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '213a8151-04de-4dee-8d0e-06db0fbd89aa', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.483602] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Creating folder: Project (741fafb6661f409fa15f55661286b21e). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.484452] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b451bc-d4d2-4c99-85a6-d6e49f64db37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.488295] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7baca39c-6797-4e62-b8b9-edee191bdd87 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.495144] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae75e94-0272-4721-bc94-34504810cfb6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.508912] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a79312-6f78-4dc2-952b-38f688df19e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.542383] env[68674]: DEBUG nova.compute.manager [req-3b99d62c-d63d-4e16-8bd4-02f2c38f5410 req-40d1fca7-2953-477b-9cba-d4e448ddd783 service nova] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Detach interface failed, port_id=8ccc7517-d8c1-44f5-a6ef-52fd819f4904, reason: Instance 1699f556-d451-40e3-a213-7edb753b03f1 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 758.543117] env[68674]: DEBUG nova.compute.provider_tree [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.548441] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Created folder: Project (741fafb6661f409fa15f55661286b21e) in parent group-v647377. [ 758.548621] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Creating folder: Instances. Parent ref: group-v647530. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.549108] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2a121fe-2e74-4cc3-840a-332a729aca10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.558322] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239924, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.560580] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Created folder: Instances in parent group-v647530. 
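[editor's note] The recurring "Acquiring lock ... / Lock ... acquired ... waited Xs / released ... held Xs" records in this section come from oslo.concurrency's lockutils. A minimal sketch of the two usages visible here follows; the decorated function body and its behaviour are assumptions for illustration, not Nova code.

    from oslo_concurrency import lockutils

    # Decorator form, comparable to the "compute_resources" lock messages above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs only while the lock is held; lockutils logs how long the caller
        # waited for the lock and how long it was held, as in the entries above.
        pass

    # Context-manager form, comparable to the per-instance cache locks such as
    # "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b".
    with lockutils.lock('refresh_cache-714142ec-89ad-44ab-8543-11493172a50b'):
        pass  # rebuild the instance network-info cache while the lock is held

[end editor's note]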
[ 758.560822] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 758.561095] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.561240] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f8f3b76-2156-4846-9097-bda6792f8845 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.580733] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.580733] env[68674]: value = "task-3239927" [ 758.580733] env[68674]: _type = "Task" [ 758.580733] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.589867] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239927, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.679539] env[68674]: DEBUG nova.network.neutron [-] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.733259] env[68674]: INFO nova.compute.manager [-] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Took 1.49 seconds to deallocate network for instance. [ 758.766926] env[68674]: DEBUG nova.compute.manager [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Received event network-changed-213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.767509] env[68674]: DEBUG nova.compute.manager [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Refreshing instance network info cache due to event network-changed-213a8151-04de-4dee-8d0e-06db0fbd89aa. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 758.767853] env[68674]: DEBUG oslo_concurrency.lockutils [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] Acquiring lock "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.768132] env[68674]: DEBUG oslo_concurrency.lockutils [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] Acquired lock "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.768625] env[68674]: DEBUG nova.network.neutron [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Refreshing network info cache for port 213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 759.053058] env[68674]: DEBUG nova.scheduler.client.report [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 759.056267] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239924, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.091857] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239927, 'name': CreateVM_Task, 'duration_secs': 0.36201} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.092066] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.092824] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.092987] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.093336] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 759.093621] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5c96e92-83d8-42ad-b5d0-def7c5b2d4b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.098785] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 759.098785] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aed7dd-f259-ad04-1e1b-7d194203252c" [ 759.098785] env[68674]: _type = "Task" [ 759.098785] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.106714] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aed7dd-f259-ad04-1e1b-7d194203252c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.183928] env[68674]: INFO nova.compute.manager [-] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Took 1.80 seconds to deallocate network for instance. 
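[editor's note] The Neutron records in this section ("Successfully created port", "Successfully updated port", "Neutron deleted interface ... detaching it from the instance") trace the usual port lifecycle against the Networking API. Nova drives this through its own nova.network.neutron wrapper; purely for illustration, a roughly equivalent client-side sequence with openstacksdk is sketched below. The cloud name and the network, port and device IDs are placeholders (the network ID is reused from the cache dump above only as an example).

    import openstack

    # Placeholder clouds.yaml entry -- an assumption, not from this log.
    conn = openstack.connect(cloud='devstack')

    # Create a port on a network, roughly what allocate_for_instance()
    # asks Neutron to do before the port is bound to the instance.
    port = conn.network.create_port(
        network_id='0232e7d9-d9e3-4ac3-b5b4-79579c4d44ae',
        device_owner='compute:nova')

    # Associate it with an instance, then delete it on teardown -- deletion is
    # what produces the network-vif-deleted external events seen above.
    conn.network.update_port(port, device_id='714142ec-89ad-44ab-8543-11493172a50b')
    conn.network.delete_port(port, ignore_missing=True)

[end editor's note]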
[ 759.242499] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.484562] env[68674]: DEBUG nova.network.neutron [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Successfully updated port: fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 759.554384] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239924, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.558279] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.301s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.560411] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.843s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.561930] env[68674]: INFO nova.compute.claims [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.584463] env[68674]: INFO nova.scheduler.client.report [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted allocations for instance 505b0352-39ab-4841-8766-14626af2b13e [ 759.608139] env[68674]: DEBUG nova.network.neutron [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Updated VIF entry in instance network info cache for port 213a8151-04de-4dee-8d0e-06db0fbd89aa. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.608579] env[68674]: DEBUG nova.network.neutron [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Updating instance_info_cache with network_info: [{"id": "213a8151-04de-4dee-8d0e-06db0fbd89aa", "address": "fa:16:3e:21:1f:03", "network": {"id": "0232e7d9-d9e3-4ac3-b5b4-79579c4d44ae", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-129129078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "741fafb6661f409fa15f55661286b21e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213a8151-04", "ovs_interfaceid": "213a8151-04de-4dee-8d0e-06db0fbd89aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.613422] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aed7dd-f259-ad04-1e1b-7d194203252c, 'name': SearchDatastore_Task, 'duration_secs': 0.010201} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.614028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.614281] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.614539] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.614690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.614870] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.615743] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf169769-6e0f-403e-9bdd-6d662d8d8720 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.625081] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.626374] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.626374] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3762d757-5d51-4475-a4b0-18dad8b7d6e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.631560] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 759.631560] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524364cc-6dba-8f5b-496e-581dfadbbf85" [ 759.631560] env[68674]: _type = "Task" [ 759.631560] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.642415] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524364cc-6dba-8f5b-496e-581dfadbbf85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.690494] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.982387] env[68674]: DEBUG nova.compute.manager [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 759.982387] env[68674]: DEBUG nova.compute.manager [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing instance network info cache due to event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 759.982387] env[68674]: DEBUG oslo_concurrency.lockutils [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.982878] env[68674]: DEBUG oslo_concurrency.lockutils [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.982878] env[68674]: DEBUG nova.network.neutron [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 759.986934] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "refresh_cache-55727bbc-6b65-4e4c-ba4f-8240efbf052a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.987110] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "refresh_cache-55727bbc-6b65-4e4c-ba4f-8240efbf052a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.987276] env[68674]: DEBUG nova.network.neutron [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.054220] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239924, 'name': ReconfigVM_Task, 'duration_secs': 1.612578} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.054577] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/2007222e-e4e5-44b3-bd9e-55b4a2143c3e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.055148] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acd72462-ec02-4b2e-9192-92757df07bb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.062114] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 760.062114] env[68674]: value = "task-3239929" [ 760.062114] env[68674]: _type = "Task" [ 760.062114] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.073038] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239929, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.094935] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71ee26b0-70c2-489a-aa90-4e98d73a2db8 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "505b0352-39ab-4841-8766-14626af2b13e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.406s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.115262] env[68674]: DEBUG oslo_concurrency.lockutils [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] Releasing lock "refresh_cache-714142ec-89ad-44ab-8543-11493172a50b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.115565] env[68674]: DEBUG nova.compute.manager [req-bd07e3b6-1d23-4c5a-a3e4-8bab795c94ca req-c1722e79-1a7d-45f5-b69d-c68672e649b3 service nova] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Received event network-vif-deleted-52eba913-88d6-4c13-94bf-ad6cc7976b07 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.142272] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524364cc-6dba-8f5b-496e-581dfadbbf85, 'name': SearchDatastore_Task, 'duration_secs': 0.009345} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.143202] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2166c07d-b655-42e8-9b0f-ded753de6516 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.149192] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 760.149192] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dc25e6-0ddd-4f79-6cd0-09cb74418d36" [ 760.149192] env[68674]: _type = "Task" [ 760.149192] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.158540] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dc25e6-0ddd-4f79-6cd0-09cb74418d36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.539057] env[68674]: DEBUG nova.network.neutron [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.591194] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239929, 'name': Rename_Task, 'duration_secs': 0.313241} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.591194] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.591194] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cf8256a-a988-4738-9dae-4a53f72ffa89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.600024] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 760.600024] env[68674]: value = "task-3239930" [ 760.600024] env[68674]: _type = "Task" [ 760.600024] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.612131] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.662686] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dc25e6-0ddd-4f79-6cd0-09cb74418d36, 'name': SearchDatastore_Task, 'duration_secs': 0.010721} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.663019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.663327] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 714142ec-89ad-44ab-8543-11493172a50b/714142ec-89ad-44ab-8543-11493172a50b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.663637] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4f065c6-61ab-421b-910a-01987f46fddc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.670480] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 760.670480] env[68674]: value = "task-3239931" [ 760.670480] env[68674]: _type = "Task" [ 760.670480] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.682778] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239931, 'name': CopyVirtualDisk_Task} progress is 0%. 
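The ReconfigVM_Task, Rename_Task, PowerOnVM_Task and CopyVirtualDisk_Task entries above all follow the same oslo.vmware shape: a *_Task method is invoked through the session, and wait_for_task() polls it (the "progress is N%" lines) until it finishes or fails. A hedged sketch of that call shape; `session` is assumed to be an oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference, and real Nova code goes through its vm_util helpers rather than calling this directly:

    def power_on(session, vm_ref):
        # Kick off the vCenter task, then block while oslo.vmware polls it.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task_ref)  # raises if the task ends in error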
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.807092] env[68674]: DEBUG nova.compute.manager [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Received event network-vif-plugged-fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.807426] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] Acquiring lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.807801] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.808013] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.808189] env[68674]: DEBUG nova.compute.manager [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] No waiting events found dispatching network-vif-plugged-fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 760.808352] env[68674]: WARNING nova.compute.manager [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Received unexpected event network-vif-plugged-fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 for instance with vm_state building and task_state spawning. [ 760.808623] env[68674]: DEBUG nova.compute.manager [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Received event network-changed-fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.808802] env[68674]: DEBUG nova.compute.manager [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Refreshing instance network info cache due to event network-changed-fe21252e-0622-4932-bd5d-cd1a3e5ecdc4. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 760.811028] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] Acquiring lock "refresh_cache-55727bbc-6b65-4e4c-ba4f-8240efbf052a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.860927] env[68674]: DEBUG nova.network.neutron [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Updating instance_info_cache with network_info: [{"id": "fe21252e-0622-4932-bd5d-cd1a3e5ecdc4", "address": "fa:16:3e:13:d4:66", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe21252e-06", "ovs_interfaceid": "fe21252e-0622-4932-bd5d-cd1a3e5ecdc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.961143] env[68674]: DEBUG nova.network.neutron [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updated VIF entry in instance network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 760.961143] env[68674]: DEBUG nova.network.neutron [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.112640] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239930, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.184370] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239931, 'name': CopyVirtualDisk_Task} progress is 51%. 
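The instance_info_cache updates above carry the full network_info structure as a list of VIF dictionaries. A small, runnable sketch of pulling the MAC and fixed IPv4 address out of such an entry, trimmed to the fields it touches and using the values logged for port 3bad0946-10af-40d0-a8c2-a5469f09cf39:

    # Trimmed copy of the cached VIF entry shown in the log above.
    vif = {
        "id": "3bad0946-10af-40d0-a8c2-a5469f09cf39",
        "address": "fa:16:3e:92:b8:08",
        "network": {
            "subnets": [
                {"ips": [{"address": "192.168.128.13", "type": "fixed"}]},
            ],
        },
    }

    mac = vif["address"]
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    print(mac, fixed_ips)  # fa:16:3e:92:b8:08 ['192.168.128.13']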
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.191285] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028798c0-675a-4e84-97bb-90f8ad99caa4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.199634] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579a9938-bcc0-43ec-a127-f855fee41118 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.235513] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e87ee55-ffb4-408e-a8e1-1e88bb2d0125 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.244190] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8989a069-6380-431e-bf77-03e351f3b7f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.260716] env[68674]: DEBUG nova.compute.provider_tree [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.366045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "refresh_cache-55727bbc-6b65-4e4c-ba4f-8240efbf052a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.366422] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Instance network_info: |[{"id": "fe21252e-0622-4932-bd5d-cd1a3e5ecdc4", "address": "fa:16:3e:13:d4:66", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe21252e-06", "ovs_interfaceid": "fe21252e-0622-4932-bd5d-cd1a3e5ecdc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
761.366817] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] Acquired lock "refresh_cache-55727bbc-6b65-4e4c-ba4f-8240efbf052a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.367068] env[68674]: DEBUG nova.network.neutron [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Refreshing network info cache for port fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.368362] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:d4:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe21252e-0622-4932-bd5d-cd1a3e5ecdc4', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 761.376722] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 761.377821] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 761.378067] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41142a7e-fcea-4fd0-8a47-e36cc2716e84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.395731] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "3463e09e-dc2f-432c-9eff-8192c2616240" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.395978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "3463e09e-dc2f-432c-9eff-8192c2616240" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.396262] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "3463e09e-dc2f-432c-9eff-8192c2616240-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.396462] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "3463e09e-dc2f-432c-9eff-8192c2616240-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.396630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "3463e09e-dc2f-432c-9eff-8192c2616240-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.399252] env[68674]: INFO nova.compute.manager [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Terminating instance [ 761.403873] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 761.403873] env[68674]: value = "task-3239932" [ 761.403873] env[68674]: _type = "Task" [ 761.403873] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.416766] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239932, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.463931] env[68674]: DEBUG oslo_concurrency.lockutils [req-a7942d6a-affa-425c-80a4-55c022ae6305 req-de384266-3522-4c9d-a136-4968ad618fd9 service nova] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.611280] env[68674]: DEBUG oslo_vmware.api [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239930, 'name': PowerOnVM_Task, 'duration_secs': 0.85537} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.611856] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.611856] env[68674]: INFO nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Took 9.99 seconds to spawn the instance on the hypervisor. 
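Just before CreateVM_Task above, vmops logs the "Instance VIF info" it hands to build_virtual_machine; for an NSX-backed port the network_ref points at the opaque logical switch rather than a named port group. An illustrative mapping from the cached VIF entry to that record, with field names copied from the log (the helper itself is a sketch, not Nova's implementation):

    def vif_info_from_cache(vif):
        # Cached VIF entry fields -> the 'Instance VIF info' record above.
        return {
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }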
[ 761.612067] env[68674]: DEBUG nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.612917] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe621936-8df7-4c27-900d-5dc22dcfd19e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.683772] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239931, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683758} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.684075] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 714142ec-89ad-44ab-8543-11493172a50b/714142ec-89ad-44ab-8543-11493172a50b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.684316] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.684594] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3960100d-a47f-4d8d-965c-60e0cdb00ffb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.691266] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 761.691266] env[68674]: value = "task-3239933" [ 761.691266] env[68674]: _type = "Task" [ 761.691266] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.699728] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239933, 'name': ExtendVirtualDisk_Task} progress is 0%. 
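In the extend step above, "Extending root virtual disk to 1048576" gives the requested size in KiB: after CopyVirtualDisk_Task the copied image is grown to the flavor's root disk size. The arithmetic, assuming the 1 GiB root disk these tempest flavors typically use:

    root_gb = 1                              # assumed flavor root_gb
    requested_size_kb = root_gb * 1024 * 1024
    assert requested_size_kb == 1048576      # matches the value logged above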
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.765351] env[68674]: DEBUG nova.scheduler.client.report [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.842639] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 761.842911] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647529', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'name': 'volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '503e9328-bbd8-414f-8bea-250ed8247d67', 'attached_at': '', 'detached_at': '', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'serial': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 761.843859] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e1aafd-b62f-41e0-a990-6ac00d3d15d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.862759] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8525d2f-d1e3-4aff-bab6-ea1f298ec295 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.893985] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67/volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.894335] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-017f446d-62b4-4ab6-b3b3-c1f40ab22e85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.908582] env[68674]: DEBUG nova.compute.manager 
[None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.908751] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.909573] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0e3ea9-28e5-46ea-a7df-5a0c6a6fe670 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.919748] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.923389] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b73b89d-7626-4401-ace7-c82d77958c00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.925237] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239932, 'name': CreateVM_Task, 'duration_secs': 0.446612} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.925532] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Waiting for the task: (returnval){ [ 761.925532] env[68674]: value = "task-3239934" [ 761.925532] env[68674]: _type = "Task" [ 761.925532] env[68674]: } to complete. 
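The "Inventory has not changed" report for provider ade3f042-7427-494b-9654-0b65e074850c a few entries above lists the placement inventory per resource class; the capacity the scheduler can actually allocate against is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check against the logged values:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    effective = {
        rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        for rc, inv in inventory.items()
    }
    print(effective)  # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}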
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.925760] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.926715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.926880] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.927201] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 761.931150] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cb315ac-bae1-41a3-9775-47539d511378 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.935109] env[68674]: DEBUG oslo_vmware.api [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 761.935109] env[68674]: value = "task-3239935" [ 761.935109] env[68674]: _type = "Task" [ 761.935109] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.944047] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.944412] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 761.944412] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52474847-5f64-53f4-42b7-13da1a573b1f" [ 761.944412] env[68674]: _type = "Task" [ 761.944412] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.952277] env[68674]: DEBUG oslo_vmware.api [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239935, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.958823] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52474847-5f64-53f4-42b7-13da1a573b1f, 'name': SearchDatastore_Task, 'duration_secs': 0.010545} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.962015] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.962315] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.962623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.962796] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.962987] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.963348] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec22c2ed-83b4-40df-aa46-b7d784898cec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.973118] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.973743] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 
tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.974111] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5274fa-2887-49b1-a052-3a6d61df27e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.979766] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 761.979766] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52860b15-b14f-6586-fa0d-78d03468f529" [ 761.979766] env[68674]: _type = "Task" [ 761.979766] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.988391] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52860b15-b14f-6586-fa0d-78d03468f529, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.129512] env[68674]: INFO nova.compute.manager [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Took 47.39 seconds to build instance. [ 762.201962] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070946} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.203079] env[68674]: DEBUG nova.network.neutron [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Updated VIF entry in instance network info cache for port fe21252e-0622-4932-bd5d-cd1a3e5ecdc4. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.203449] env[68674]: DEBUG nova.network.neutron [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Updating instance_info_cache with network_info: [{"id": "fe21252e-0622-4932-bd5d-cd1a3e5ecdc4", "address": "fa:16:3e:13:d4:66", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe21252e-06", "ovs_interfaceid": "fe21252e-0622-4932-bd5d-cd1a3e5ecdc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.204717] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 762.206049] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d82ea05-7873-4e50-8d50-35a7dfec911d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.234478] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 714142ec-89ad-44ab-8543-11493172a50b/714142ec-89ad-44ab-8543-11493172a50b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 762.235258] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-474c3d32-ec07-422d-8ef9-2dbe5901fbc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.259205] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 762.259205] env[68674]: value = "task-3239936" [ 762.259205] env[68674]: _type = "Task" [ 762.259205] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.268456] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239936, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.270396] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.270937] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 762.273803] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.518s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.274050] env[68674]: DEBUG nova.objects.instance [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'resources' on Instance uuid d88ccf9b-7432-4be0-82f7-b2a9155f7d86 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 762.437356] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.446362] env[68674]: DEBUG oslo_vmware.api [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239935, 'name': PowerOffVM_Task, 'duration_secs': 0.318447} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.446675] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.446861] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.447133] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08c089f9-8e86-4582-adc4-0819a9e216d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.490762] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52860b15-b14f-6586-fa0d-78d03468f529, 'name': SearchDatastore_Task, 'duration_secs': 0.011421} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.491696] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afcc49bf-51fc-4629-a2d6-664a3f08de17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.497791] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 762.497791] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a6823-9cb9-c935-4b8b-8f35e568b0d9" [ 762.497791] env[68674]: _type = "Task" [ 762.497791] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.506212] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a6823-9cb9-c935-4b8b-8f35e568b0d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.518207] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.518619] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.518836] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleting the datastore file [datastore1] 3463e09e-dc2f-432c-9eff-8192c2616240 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.519886] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b4c38c6-ff2e-4480-914a-a2c034054703 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.533448] env[68674]: DEBUG oslo_vmware.api [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 762.533448] env[68674]: value = "task-3239938" [ 762.533448] env[68674]: _type = "Task" [ 762.533448] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.545953] env[68674]: DEBUG oslo_vmware.api [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.634572] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7c743af0-fd8c-4e32-a0f3-7e31f27c6335 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.527s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.705917] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc33caa2-0344-44fe-afb1-d8ee1dec1f04 req-467df605-b228-4875-84fa-c413ace7d894 service nova] Releasing lock "refresh_cache-55727bbc-6b65-4e4c-ba4f-8240efbf052a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.745852] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526e0487-99b3-2c26-2a53-a215cfa1185e/disk-0.vmdk. 
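The terminate path for instance 3463e09e-dc2f-432c-9eff-8192c2616240 above runs the usual vmwareapi teardown: power the VM off, unregister it from the vCenter inventory, then delete its directory from the datastore. A hedged sketch of that ordering through the session; the helper is illustrative (Nova's real code lives in vmops/vm_util/ds_util), and `ds_path` stands for a datastore path string such as "[datastore1] 3463e09e-dc2f-432c-9eff-8192c2616240":

    def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
        # 1. Power off: a vCenter task, so it is polled to completion.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # 2. Unregister the VM from the inventory (not a task).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # 3. Delete whatever is left on the datastore.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)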
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 762.746785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1846d77-4c6f-4296-8630-c0bddb50ff38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.759015] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526e0487-99b3-2c26-2a53-a215cfa1185e/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 762.759225] env[68674]: ERROR oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526e0487-99b3-2c26-2a53-a215cfa1185e/disk-0.vmdk due to incomplete transfer. [ 762.759533] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e3697191-8025-4ac7-bb53-073d7b7ee441 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.769898] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239936, 'name': ReconfigVM_Task, 'duration_secs': 0.34883} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.771084] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 714142ec-89ad-44ab-8543-11493172a50b/714142ec-89ad-44ab-8543-11493172a50b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.771756] env[68674]: DEBUG oslo_vmware.rw_handles [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526e0487-99b3-2c26-2a53-a215cfa1185e/disk-0.vmdk. 
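The ERROR above comes from closing the NFC read handle used for the snapshot upload: when the HttpNfcLease is still "ready" but the transfer did not run to completion, the lease is aborted instead of completed, which is what the "Aborting lease ... due to incomplete transfer" entry records. A sketch of that decision, illustrative rather than the oslo.vmware implementation; `session` and `lease_ref` are assumed to be a VMwareAPISession and an HttpNfcLease managed-object reference:

    def close_lease(session, lease_ref, transfer_complete):
        if transfer_complete:
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease_ref)
        else:
            # Matches the "Aborting lease ... due to incomplete transfer" entry.
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease_ref)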
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 762.771935] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Uploaded image c5ef204e-070f-42a2-912e-3be2b0141452 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 762.774070] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 762.774828] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c979b417-3fae-4d73-afa1-19faa633e527 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.776660] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c8fd42b3-1eb4-4b07-90a0-1c47f6f0ef50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.778948] env[68674]: DEBUG nova.compute.utils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 762.783470] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 762.783709] env[68674]: DEBUG nova.network.neutron [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 762.791773] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 762.791773] env[68674]: value = "task-3239940" [ 762.791773] env[68674]: _type = "Task" [ 762.791773] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.793708] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 762.793708] env[68674]: value = "task-3239939" [ 762.793708] env[68674]: _type = "Task" [ 762.793708] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.808140] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239940, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.811492] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239939, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.853097] env[68674]: DEBUG nova.policy [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bc8df4f271e4330b3874e04b792a537', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a84d9d6e23bd40049c34e6f087252b4e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 762.937374] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239934, 'name': ReconfigVM_Task, 'duration_secs': 0.668702} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.938521] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Reconfigured VM instance instance-0000000b to attach disk [datastore2] volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67/volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.945683] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e1272fd-dc10-40a3-8659-15f82caa4e46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.967999] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Waiting for the task: (returnval){ [ 762.967999] env[68674]: value = "task-3239941" [ 762.967999] env[68674]: _type = "Task" [ 762.967999] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.979230] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239941, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.018037] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a6823-9cb9-c935-4b8b-8f35e568b0d9, 'name': SearchDatastore_Task, 'duration_secs': 0.011307} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.018037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.018037] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 55727bbc-6b65-4e4c-ba4f-8240efbf052a/55727bbc-6b65-4e4c-ba4f-8240efbf052a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.018037] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bdbed5d-8d24-4a3d-8f8c-4e25dad3a08a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.020630] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 763.020630] env[68674]: value = "task-3239942" [ 763.020630] env[68674]: _type = "Task" [ 763.020630] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.032023] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239942, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.032023] env[68674]: DEBUG nova.compute.manager [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 763.032023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8334ed82-51a3-4711-bcd4-dd8917f2f92c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.052740] env[68674]: DEBUG oslo_vmware.api [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3239938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150509} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.053254] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.053438] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.053613] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.053784] env[68674]: INFO nova.compute.manager [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Took 1.15 seconds to destroy the instance on the hypervisor. [ 763.060014] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.060014] env[68674]: DEBUG nova.compute.manager [-] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.060014] env[68674]: DEBUG nova.network.neutron [-] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.136866] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.257302] env[68674]: INFO nova.compute.manager [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Rescuing [ 763.257623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.257719] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.257887] env[68674]: DEBUG nova.network.neutron [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.284507] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 763.302722] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239940, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.316800] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239939, 'name': Rename_Task, 'duration_secs': 0.143957} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.316800] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 763.316800] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bed1065d-d902-4f04-a667-e6f8f6b43aa6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.321094] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 763.321094] env[68674]: value = "task-3239943" [ 763.321094] env[68674]: _type = "Task" [ 763.321094] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.336033] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239943, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.420027] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b874136e-cd5e-4071-9edc-c99a5dfaa962 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.433150] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03276a36-1a53-4dd5-8465-c92d2c495c13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.468368] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470347f2-58ec-4020-8c0b-0b140498e739 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.483515] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a94461-1ea6-4be4-a4a1-f5ab8ac2a765 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.488809] env[68674]: DEBUG oslo_vmware.api [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239941, 'name': ReconfigVM_Task, 'duration_secs': 0.275018} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.489263] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647529', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'name': 'volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '503e9328-bbd8-414f-8bea-250ed8247d67', 'attached_at': '', 'detached_at': '', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'serial': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 763.508115] env[68674]: DEBUG nova.compute.provider_tree [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 763.532275] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239942, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.555324] env[68674]: INFO nova.compute.manager [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] instance snapshotting [ 763.556168] env[68674]: DEBUG nova.objects.instance [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'flavor' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.668573] env[68674]: DEBUG nova.network.neutron [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Successfully created port: eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.681877] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.805868] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239940, 'name': Destroy_Task, 'duration_secs': 0.744747} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.806178] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Destroyed the VM [ 763.806527] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 763.806841] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-af0a5542-fb01-45f1-bbee-747809559c5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.814824] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 763.814824] env[68674]: value = "task-3239944" [ 763.814824] env[68674]: _type = "Task" [ 763.814824] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.827303] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239944, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.835611] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239943, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.044684] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681395} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.045014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 55727bbc-6b65-4e4c-ba4f-8240efbf052a/55727bbc-6b65-4e4c-ba4f-8240efbf052a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.045291] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.045572] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96a88e87-92c6-49af-97b7-bd4b11bfe8e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.050023] env[68674]: ERROR nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [req-44039c0e-d346-46bb-81a5-8c3479cc3e19] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-44039c0e-d346-46bb-81a5-8c3479cc3e19"}]} [ 764.054466] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 764.054466] env[68674]: value = "task-3239945" [ 764.054466] env[68674]: _type = "Task" [ 764.054466] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.059822] env[68674]: DEBUG nova.compute.manager [req-85dfcad5-5a54-48da-b21d-959fb5d23d14 req-2a72e8a2-da26-420e-9a09-526f803d3b80 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Received event network-vif-deleted-b8d668c5-2f06-454e-842e-fdbe52dffa5e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 764.060047] env[68674]: INFO nova.compute.manager [req-85dfcad5-5a54-48da-b21d-959fb5d23d14 req-2a72e8a2-da26-420e-9a09-526f803d3b80 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Neutron deleted interface b8d668c5-2f06-454e-842e-fdbe52dffa5e; detaching it from the instance and deleting it from the info cache [ 764.060277] env[68674]: DEBUG nova.network.neutron [req-85dfcad5-5a54-48da-b21d-959fb5d23d14 req-2a72e8a2-da26-420e-9a09-526f803d3b80 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.079628] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239945, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.083553] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cfcb1d-01be-4147-89fa-6724f714ee8d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.090984] env[68674]: DEBUG nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 764.119756] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e8d2db-47c9-45a4-a943-48067eb88d8f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.164031] env[68674]: DEBUG nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 764.164461] env[68674]: DEBUG nova.compute.provider_tree [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 764.184091] env[68674]: DEBUG nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 764.219430] env[68674]: DEBUG nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 764.282317] env[68674]: DEBUG 
nova.network.neutron [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.303756] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 764.326271] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239944, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.342015] env[68674]: DEBUG oslo_vmware.api [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239943, 'name': PowerOnVM_Task, 'duration_secs': 0.904163} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.342015] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.342015] env[68674]: INFO nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Took 8.88 seconds to spawn the instance on the hypervisor. 
[ 764.342015] env[68674]: DEBUG nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.342015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d6f5ec-bc79-400e-a7de-a5e3605a8a46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.345431] env[68674]: DEBUG nova.network.neutron [-] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.376507] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.376690] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.376840] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.377036] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.377190] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.377367] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.377631] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.377824] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.378079] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.378208] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.378390] env[68674]: DEBUG nova.virt.hardware [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.379636] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04aa5c5-dbdc-4671-943f-50fb85b0a257 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.391303] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5963b8d8-0780-4591-ba5a-e0dfc753d630 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.570206] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111122} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.573083] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.574736] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c0ef1d-fa2b-40ee-bec0-072216fd633b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.577294] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f86fcf1-fb89-4993-8239-bda0d194b595 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.590864] env[68674]: DEBUG nova.objects.instance [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lazy-loading 'flavor' on Instance uuid 503e9328-bbd8-414f-8bea-250ed8247d67 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.600641] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 55727bbc-6b65-4e4c-ba4f-8240efbf052a/55727bbc-6b65-4e4c-ba4f-8240efbf052a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.604733] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8266b434-9bfb-42c1-8b41-8c9accde392f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.626466] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380ebaab-d84f-473f-8f16-3eed99aaf980 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.644183] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 764.644183] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c2d8fb4f-c06f-4364-93c5-a6a39d2db0f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.645284] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 764.645284] env[68674]: value = "task-3239946" [ 764.645284] env[68674]: _type = "Task" [ 764.645284] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.651148] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 764.651148] env[68674]: value = "task-3239947" [ 764.651148] env[68674]: _type = "Task" [ 764.651148] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.659913] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239946, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.677716] env[68674]: DEBUG nova.compute.manager [req-85dfcad5-5a54-48da-b21d-959fb5d23d14 req-2a72e8a2-da26-420e-9a09-526f803d3b80 service nova] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Detach interface failed, port_id=b8d668c5-2f06-454e-842e-fdbe52dffa5e, reason: Instance 3463e09e-dc2f-432c-9eff-8192c2616240 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 764.687231] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239947, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.786795] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.841294] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239944, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.856139] env[68674]: INFO nova.compute.manager [-] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Took 1.80 seconds to deallocate network for instance. [ 764.875843] env[68674]: INFO nova.compute.manager [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Took 43.37 seconds to build instance. 
[ 764.990453] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4d3a76-8b42-4e9b-98d1-8068513d4cd7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.999072] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68de421a-5283-4732-87bb-03c4920f941e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.032379] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55feca37-927d-4663-be5f-37951acc2339 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.040758] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60ec831-6a84-4f82-b452-71c137fffd52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.056920] env[68674]: DEBUG nova.compute.provider_tree [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 765.108518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-28d5d064-72d5-483c-8e42-9cd170ff8d28 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.891s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.160229] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239946, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.170094] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239947, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.327728] env[68674]: DEBUG oslo_vmware.api [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239944, 'name': RemoveSnapshot_Task, 'duration_secs': 1.333107} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.329574] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 765.329888] env[68674]: INFO nova.compute.manager [None req-95f0be57-93a3-40b9-9e89-f59606fd7ae2 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Took 17.42 seconds to snapshot the instance on the hypervisor. [ 765.379030] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87f4413c-6b1b-47f5-a6ac-605ab931dfe1 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "714142ec-89ad-44ab-8543-11493172a50b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.988s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.379995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.606382] env[68674]: DEBUG nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 765.606657] env[68674]: DEBUG nova.compute.provider_tree [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 77 to 78 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 765.606845] env[68674]: DEBUG nova.compute.provider_tree [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 765.658016] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239946, 'name': ReconfigVM_Task, 'duration_secs': 0.639246} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.658371] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 55727bbc-6b65-4e4c-ba4f-8240efbf052a/55727bbc-6b65-4e4c-ba4f-8240efbf052a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.659091] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1b8ece4-a88a-4793-9a85-f35668520f4a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.668755] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239947, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.669986] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 765.669986] env[68674]: value = "task-3239948" [ 765.669986] env[68674]: _type = "Task" [ 765.669986] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.681428] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239948, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.884486] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 765.957203] env[68674]: DEBUG nova.network.neutron [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Successfully updated port: eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 766.087142] env[68674]: DEBUG nova.compute.manager [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Received event network-vif-plugged-eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.087142] env[68674]: DEBUG oslo_concurrency.lockutils [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] Acquiring lock "f029042f-d80b-453e-adc9-1e65d7da7aaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.087142] env[68674]: DEBUG oslo_concurrency.lockutils [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.087142] env[68674]: DEBUG oslo_concurrency.lockutils [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.087142] env[68674]: DEBUG nova.compute.manager [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] No waiting events found dispatching network-vif-plugged-eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 766.087391] env[68674]: WARNING nova.compute.manager [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Received unexpected event network-vif-plugged-eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 for instance with vm_state building and task_state spawning. [ 766.087540] env[68674]: DEBUG nova.compute.manager [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Received event network-changed-eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.087705] env[68674]: DEBUG nova.compute.manager [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Refreshing instance network info cache due to event network-changed-eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 766.087895] env[68674]: DEBUG oslo_concurrency.lockutils [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] Acquiring lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.088225] env[68674]: DEBUG oslo_concurrency.lockutils [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] Acquired lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.088423] env[68674]: DEBUG nova.network.neutron [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Refreshing network info cache for port eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.114820] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.840s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.116477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.885s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.116735] env[68674]: DEBUG nova.objects.instance [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lazy-loading 'resources' on Instance uuid 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 766.172198] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239947, 'name': CreateSnapshot_Task, 'duration_secs': 1.244078} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.173206] env[68674]: INFO nova.scheduler.client.report [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocations for instance d88ccf9b-7432-4be0-82f7-b2a9155f7d86 [ 766.178244] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 766.181229] env[68674]: DEBUG oslo_concurrency.lockutils [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.181526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.183567] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cc7bfa-e916-4c72-bdd8-c9fca0c42403 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.194494] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239948, 'name': Rename_Task, 'duration_secs': 0.145565} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.198483] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.198768] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0f848ae-16d8-47c2-95de-51d332e88ce7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.208183] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 766.208183] env[68674]: value = "task-3239949" [ 766.208183] env[68674]: _type = "Task" [ 766.208183] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.216276] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.339372] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 766.339931] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-234b8fd2-4d28-433e-9c72-06625198c7c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.348089] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 766.348089] env[68674]: value = "task-3239950" [ 766.348089] env[68674]: _type = "Task" [ 766.348089] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.356706] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239950, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.403816] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.461177] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.557452] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "714142ec-89ad-44ab-8543-11493172a50b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.557764] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "714142ec-89ad-44ab-8543-11493172a50b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.558010] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "714142ec-89ad-44ab-8543-11493172a50b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.558281] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "714142ec-89ad-44ab-8543-11493172a50b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.558527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "714142ec-89ad-44ab-8543-11493172a50b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.561035] env[68674]: INFO nova.compute.manager [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 
714142ec-89ad-44ab-8543-11493172a50b] Terminating instance [ 766.686364] env[68674]: INFO nova.compute.manager [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Detaching volume d79cd12d-5be5-4762-bb3d-a4b82e0afd67 [ 766.688599] env[68674]: DEBUG oslo_concurrency.lockutils [None req-206517ff-acd8-4230-b84f-bbd6759bf8bd tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "d88ccf9b-7432-4be0-82f7-b2a9155f7d86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.402s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.693478] env[68674]: DEBUG nova.network.neutron [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.708063] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 766.711024] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b6f4ff6c-511d-4176-8c8c-35ce9b6c98e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.727963] env[68674]: DEBUG oslo_vmware.api [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239949, 'name': PowerOnVM_Task, 'duration_secs': 0.503651} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.730218] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 766.730438] env[68674]: INFO nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Took 8.46 seconds to spawn the instance on the hypervisor. 
[ 766.730619] env[68674]: DEBUG nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 766.731256] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 766.731256] env[68674]: value = "task-3239951" [ 766.731256] env[68674]: _type = "Task" [ 766.731256] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.734110] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0989673-650f-4ac1-8283-095180b37389 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.749103] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239951, 'name': CloneVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.773394] env[68674]: INFO nova.virt.block_device [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Attempting to driver detach volume d79cd12d-5be5-4762-bb3d-a4b82e0afd67 from mountpoint /dev/sdb [ 766.773482] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 766.773694] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647529', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'name': 'volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '503e9328-bbd8-414f-8bea-250ed8247d67', 'attached_at': '', 'detached_at': '', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'serial': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 766.777586] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2b6279-04e9-4dd1-acac-bb799cf91212 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.809640] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523d7809-575a-40d6-89de-00cb137419b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.820962] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e675877-0784-4731-ba1e-5d9c921c5f92 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.851917] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71ae6df-7292-450a-95da-d05a52aa29ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.862426] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239950, 'name': PowerOffVM_Task, 'duration_secs': 0.249805} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.873731] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 766.874604] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] The volume has not been displaced from its original location: [datastore2] volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67/volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 766.880130] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Reconfiguring VM instance instance-0000000b to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 766.881291] env[68674]: DEBUG nova.network.neutron [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.885680] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf921260-bfde-4e1a-b396-63d83239aeb6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.888784] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-470ca857-b16b-44bb-ab83-db02c9db5e58 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.927833] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631aa03b-64dc-4745-936e-92aaa45a14d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.931221] env[68674]: DEBUG oslo_vmware.api [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Waiting for the task: (returnval){ [ 766.931221] env[68674]: value = "task-3239952" [ 766.931221] env[68674]: _type = "Task" [ 766.931221] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.947832] env[68674]: DEBUG oslo_vmware.api [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239952, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.975606] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 766.975606] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9159425-00d9-4ebd-a7f8-72de79aa47ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.983397] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 766.983397] env[68674]: value = "task-3239953" [ 766.983397] env[68674]: _type = "Task" [ 766.983397] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.993513] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 766.994143] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.994143] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.994283] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.994404] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.994655] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ca3daab-d56b-46d8-bbe6-c4da3eaf1f82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.003852] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 767.004503] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 767.004805] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47b46260-cde9-40ae-a0c2-bebebd7f747c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.010686] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 767.010686] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528a7978-6fec-0db3-177e-bca68df6b994" [ 767.010686] env[68674]: _type = "Task" [ 767.010686] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.021656] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528a7978-6fec-0db3-177e-bca68df6b994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.067522] env[68674]: DEBUG nova.compute.manager [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 767.067955] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.069046] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e639930-db34-472c-b276-f7cb44ee8d9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.079892] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 767.080102] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b80c507-415a-446e-a8f9-7d147b8840d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.086448] env[68674]: DEBUG oslo_vmware.api [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 767.086448] env[68674]: value = "task-3239954" [ 767.086448] env[68674]: _type = "Task" [ 767.086448] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.099147] env[68674]: DEBUG oslo_vmware.api [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.248706] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239951, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.268106] env[68674]: INFO nova.compute.manager [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Took 43.64 seconds to build instance. [ 767.301114] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1113b855-5bf4-4119-ae09-0974e5441e61 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.308446] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232db543-2a67-4b5e-96b7-c4757f88f578 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.343810] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1948b3-6f0c-4bdb-b9d9-cd87aaa33115 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.351905] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad370dd-7eb2-4096-8a03-062e524434c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.366297] env[68674]: DEBUG nova.compute.provider_tree [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.404416] env[68674]: DEBUG oslo_concurrency.lockutils [req-eb922912-6826-40d1-9a07-56a3e0cd002c req-1425aba0-40d9-4fa4-92b2-63cd949d9c61 service nova] Releasing lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.404840] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.405012] env[68674]: DEBUG nova.network.neutron [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 
tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.441828] env[68674]: DEBUG oslo_vmware.api [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239952, 'name': ReconfigVM_Task, 'duration_secs': 0.39582} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.442233] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Reconfigured VM instance instance-0000000b to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 767.447880] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6317451e-b443-4731-b54e-6bf388ed2678 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.462997] env[68674]: DEBUG oslo_vmware.api [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Waiting for the task: (returnval){ [ 767.462997] env[68674]: value = "task-3239955" [ 767.462997] env[68674]: _type = "Task" [ 767.462997] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.471536] env[68674]: DEBUG oslo_vmware.api [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239955, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.521291] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528a7978-6fec-0db3-177e-bca68df6b994, 'name': SearchDatastore_Task, 'duration_secs': 0.010017} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.522127] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dabfbf7a-eedd-4373-aa85-3a32a7de112c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.527687] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 767.527687] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52671f47-5dd3-25b4-0222-a0f4a21a8fc0" [ 767.527687] env[68674]: _type = "Task" [ 767.527687] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.535434] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52671f47-5dd3-25b4-0222-a0f4a21a8fc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.597939] env[68674]: DEBUG oslo_vmware.api [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239954, 'name': PowerOffVM_Task, 'duration_secs': 0.187509} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.598279] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 767.598472] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 767.598754] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-025ca3f9-d4d8-4d06-b8ab-8de3c00211e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.665382] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 767.665628] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 767.665804] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Deleting the datastore file [datastore2] 714142ec-89ad-44ab-8543-11493172a50b {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 767.666077] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb0bd68d-c92c-461d-8318-d892ad0198c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.673015] env[68674]: DEBUG oslo_vmware.api [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 
tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for the task: (returnval){ [ 767.673015] env[68674]: value = "task-3239957" [ 767.673015] env[68674]: _type = "Task" [ 767.673015] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.748870] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239951, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.770798] env[68674]: DEBUG oslo_concurrency.lockutils [None req-971981f7-6f5b-472f-abeb-7327a01858fb tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.038s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.787358] env[68674]: DEBUG nova.compute.manager [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.788339] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb312c5-3da5-4068-b783-faa68bd218fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.874925] env[68674]: DEBUG nova.scheduler.client.report [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.961200] env[68674]: DEBUG nova.network.neutron [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.974858] env[68674]: DEBUG oslo_vmware.api [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Task: {'id': task-3239955, 'name': ReconfigVM_Task, 'duration_secs': 0.154027} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.974858] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647529', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'name': 'volume-d79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '503e9328-bbd8-414f-8bea-250ed8247d67', 'attached_at': '', 'detached_at': '', 'volume_id': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67', 'serial': 'd79cd12d-5be5-4762-bb3d-a4b82e0afd67'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 768.038476] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52671f47-5dd3-25b4-0222-a0f4a21a8fc0, 'name': SearchDatastore_Task, 'duration_secs': 0.010274} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.038748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.039016] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. {{(pid=68674) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 768.039279] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0132fa7e-d4f5-4fd8-8709-53a9785eee79 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.046326] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 768.046326] env[68674]: value = "task-3239958" [ 768.046326] env[68674]: _type = "Task" [ 768.046326] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.055789] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239958, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.183547] env[68674]: DEBUG oslo_vmware.api [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Task: {'id': task-3239957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167256} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.184543] env[68674]: DEBUG nova.network.neutron [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Updating instance_info_cache with network_info: [{"id": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "address": "fa:16:3e:21:74:75", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb762aca-5f", "ovs_interfaceid": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.185888] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 768.186163] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 768.187220] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 768.187220] env[68674]: INFO nova.compute.manager [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 768.187220] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.187220] env[68674]: DEBUG nova.compute.manager [-] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 768.187220] env[68674]: DEBUG nova.network.neutron [-] [instance: 714142ec-89ad-44ab-8543-11493172a50b] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 768.249856] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239951, 'name': CloneVM_Task, 'duration_secs': 1.159382} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.252316] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created linked-clone VM from snapshot [ 768.253199] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7582854b-b605-4600-8adf-0835e10fd51b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.262505] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Uploading image cfe66e69-342f-49f5-a686-4971a7ab072c {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 768.273487] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 768.285342] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 768.285342] env[68674]: value = "vm-647535" [ 768.285342] env[68674]: _type = "VirtualMachine" [ 768.285342] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 768.285667] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-35c75643-ee4c-4fc6-a28d-d89e3dd8429d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.294643] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease: (returnval){ [ 768.294643] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d42810-1892-7d9b-d1bd-e06cc1d2f1d9" [ 768.294643] env[68674]: _type = "HttpNfcLease" [ 768.294643] env[68674]: } obtained for exporting VM: (result){ [ 768.294643] env[68674]: value = "vm-647535" [ 768.294643] env[68674]: _type = "VirtualMachine" [ 768.294643] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 768.294996] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the lease: (returnval){ [ 768.294996] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d42810-1892-7d9b-d1bd-e06cc1d2f1d9" [ 768.294996] env[68674]: _type = "HttpNfcLease" [ 768.294996] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 768.299451] env[68674]: INFO nova.compute.manager [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] instance snapshotting [ 768.302815] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d807d3-e6c4-4cc7-8bcb-2ad3e3d2ae75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.308957] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 768.308957] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d42810-1892-7d9b-d1bd-e06cc1d2f1d9" [ 768.308957] env[68674]: _type = "HttpNfcLease" [ 768.308957] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 768.328629] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71660cca-2f44-4f5c-b311-bb77dd27d87f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.380051] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.263s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.386033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.542s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.386033] env[68674]: DEBUG nova.objects.instance [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lazy-loading 'resources' on Instance uuid e75d2bc7-f356-4443-9641-d9ebf35843cd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 768.450250] env[68674]: INFO nova.scheduler.client.report [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Deleted allocations for instance 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49 [ 768.560639] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239958, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49519} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.561026] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
[ 768.561924] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03072c88-c83b-4c19-b09d-32e2f34cf632 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.588600] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.590602] env[68674]: DEBUG nova.objects.instance [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lazy-loading 'flavor' on Instance uuid 503e9328-bbd8-414f-8bea-250ed8247d67 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 768.591572] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e37994e9-3b1f-4a77-9faa-fd0761469933 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.614706] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 768.614706] env[68674]: value = "task-3239960" [ 768.614706] env[68674]: _type = "Task" [ 768.614706] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.623457] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239960, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.691021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.691021] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Instance network_info: |[{"id": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "address": "fa:16:3e:21:74:75", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb762aca-5f", "ovs_interfaceid": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 768.691396] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:74:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 768.698869] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating folder: Project (a84d9d6e23bd40049c34e6f087252b4e). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 768.699746] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9186d7a3-47db-4782-9f0a-d6ba449ffe31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.715218] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created folder: Project (a84d9d6e23bd40049c34e6f087252b4e) in parent group-v647377. [ 768.715465] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating folder: Instances. Parent ref: group-v647536. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 768.718684] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fffa85c-af83-488a-9007-6acd33be1c5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.720675] env[68674]: DEBUG nova.compute.manager [req-6846858f-2d35-424b-b6a8-94baf3d16d46 req-45570d8c-7af3-4197-8b36-86d233d68f4f service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Received event network-vif-deleted-213a8151-04de-4dee-8d0e-06db0fbd89aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.720919] env[68674]: INFO nova.compute.manager [req-6846858f-2d35-424b-b6a8-94baf3d16d46 req-45570d8c-7af3-4197-8b36-86d233d68f4f service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Neutron deleted interface 213a8151-04de-4dee-8d0e-06db0fbd89aa; detaching it from the instance and deleting it from the info cache [ 768.721050] env[68674]: DEBUG nova.network.neutron [req-6846858f-2d35-424b-b6a8-94baf3d16d46 req-45570d8c-7af3-4197-8b36-86d233d68f4f service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.730266] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created folder: Instances in parent group-v647536. [ 768.730455] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.731181] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 768.731181] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b8b7918-e75b-4c4d-95a2-b7bbe838d695 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.750091] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 768.750091] env[68674]: value = "task-3239963" [ 768.750091] env[68674]: _type = "Task" [ 768.750091] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.759859] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239963, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.804137] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 768.804137] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d42810-1892-7d9b-d1bd-e06cc1d2f1d9" [ 768.804137] env[68674]: _type = "HttpNfcLease" [ 768.804137] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 768.804837] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 768.804837] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d42810-1892-7d9b-d1bd-e06cc1d2f1d9" [ 768.804837] env[68674]: _type = "HttpNfcLease" [ 768.804837] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 768.805214] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6d584e-158b-44d8-b194-b8b46bcd8411 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.813604] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a36860-cd84-a298-ea72-94546584cc06/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 768.813833] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a36860-cd84-a298-ea72-94546584cc06/disk-0.vmdk for reading. 
{{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 768.816420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.877018] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 768.877018] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c1ed374a-3ecc-465f-9711-e8c24bc56519 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.883661] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 768.883661] env[68674]: value = "task-3239964" [ 768.883661] env[68674]: _type = "Task" [ 768.883661] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.894618] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239964, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.915819] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d4a3682c-aab4-41c7-a014-f9235bb3ffca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.960635] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3064beae-94a2-4818-acbd-5ce58540e841 tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "1b405b1f-ee1f-4e6e-9355-de8b5c26ab49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.638s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.003447] env[68674]: DEBUG nova.compute.manager [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.008265] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5854db-0a68-4c29-a0c6-6da599d66989 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.128897] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239960, 'name': ReconfigVM_Task, 'duration_secs': 0.30004} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.129086] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.131362] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00b8c63-bbd6-4a02-b3ef-6ed8b2bd6490 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.166214] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00669c10-9c89-4b46-af1d-ca0c8276dc37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.177720] env[68674]: DEBUG nova.network.neutron [-] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.185216] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 769.185216] env[68674]: value = "task-3239965" [ 769.185216] env[68674]: _type = "Task" [ 769.185216] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.197485] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239965, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.224053] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98dcfcf0-a811-43a9-9ad7-a15ab9c79dab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.237206] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4424cddb-0a5f-41a0-a722-40167e848832 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.259346] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239963, 'name': CreateVM_Task, 'duration_secs': 0.355904} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.259681] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 769.260370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.260614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.261178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 769.262156] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15eb0b81-f213-477d-93a1-566e955bd770 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.281458] env[68674]: DEBUG nova.compute.manager [req-6846858f-2d35-424b-b6a8-94baf3d16d46 req-45570d8c-7af3-4197-8b36-86d233d68f4f service nova] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Detach interface failed, port_id=213a8151-04de-4dee-8d0e-06db0fbd89aa, reason: Instance 714142ec-89ad-44ab-8543-11493172a50b could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 769.286042] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 769.286042] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b33f8-55d7-4758-9b65-5f9ca71a9c7a" [ 769.286042] env[68674]: _type = "Task" [ 769.286042] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.294835] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b33f8-55d7-4758-9b65-5f9ca71a9c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.398892] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239964, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.516394] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5527bd5-6749-4426-af2a-8e1b4debf1de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.526476] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378935ef-9885-41fe-985d-420b77e1d9da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.535609] env[68674]: INFO nova.compute.manager [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] instance snapshotting [ 769.541059] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4482b1ab-e05a-4098-9826-35571039a8f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.589073] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc1a223-7bd9-432f-aa77-1538d118fb8b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.592721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "627fb348-1749-4480-97b9-b479a182d4ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.593109] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "627fb348-1749-4480-97b9-b479a182d4ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.593373] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "627fb348-1749-4480-97b9-b479a182d4ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.593608] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "627fb348-1749-4480-97b9-b479a182d4ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.593814] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 
tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "627fb348-1749-4480-97b9-b479a182d4ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.596327] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c92236-474b-47a1-8d93-76a4ddea8f0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.601204] env[68674]: INFO nova.compute.manager [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Terminating instance [ 769.612319] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4183196-b3b6-413d-bae6-c638abc30871 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.630357] env[68674]: DEBUG oslo_concurrency.lockutils [None req-42c1f8bd-3564-46f5-83b0-b8ae2cd36987 tempest-VolumesAssistedSnapshotsTest-1563309189 tempest-VolumesAssistedSnapshotsTest-1563309189-project-admin] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.447s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.630357] env[68674]: DEBUG nova.compute.provider_tree [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.680575] env[68674]: INFO nova.compute.manager [-] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Took 1.49 seconds to deallocate network for instance. [ 769.701618] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239965, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.798083] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b33f8-55d7-4758-9b65-5f9ca71a9c7a, 'name': SearchDatastore_Task, 'duration_secs': 0.012755} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.798083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.798083] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 769.798083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.798347] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.798347] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 769.798347] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f84b655a-9997-4dcd-8da3-5c1833afa1b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.807752] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 769.807953] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 769.808682] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ee35d82-bd71-43d3-a193-d33995e694b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.813916] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 769.813916] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ffcfe3-7b00-99c5-01aa-ce91e93c2be2" [ 769.813916] env[68674]: _type = "Task" [ 769.813916] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.821774] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ffcfe3-7b00-99c5-01aa-ce91e93c2be2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.893389] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239964, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.105491] env[68674]: DEBUG nova.compute.manager [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 770.105750] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 770.106693] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae0d478-5826-4bc5-82d8-d547118a29c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.115089] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 770.115421] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-557f0289-98c7-4ca2-a085-94ae806c8e2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.119207] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 770.119616] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9c583acf-d18b-41a0-91d3-73a3a5d214ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.122603] env[68674]: DEBUG oslo_vmware.api [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 770.122603] env[68674]: value = "task-3239967" [ 770.122603] env[68674]: _type = "Task" [ 770.122603] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.128098] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 770.128098] env[68674]: value = "task-3239968" [ 770.128098] env[68674]: _type = "Task" [ 770.128098] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.135293] env[68674]: DEBUG nova.scheduler.client.report [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 770.139071] env[68674]: DEBUG oslo_vmware.api [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.145187] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239968, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.200566] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.210881] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239965, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.325034] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ffcfe3-7b00-99c5-01aa-ce91e93c2be2, 'name': SearchDatastore_Task, 'duration_secs': 0.009594} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.325756] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55232218-54ef-4d57-9d41-ad119f9e5313 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.331631] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 770.331631] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d900f6-78d8-39bc-cf58-362c9d0eabce" [ 770.331631] env[68674]: _type = "Task" [ 770.331631] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.343637] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d900f6-78d8-39bc-cf58-362c9d0eabce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.395266] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239964, 'name': CreateSnapshot_Task, 'duration_secs': 1.037042} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.395546] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 770.397903] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3e1695-a3dc-49f5-9cd0-5e936cc223b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.637093] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239968, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.640991] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.257s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.642641] env[68674]: DEBUG oslo_vmware.api [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239967, 'name': PowerOffVM_Task, 'duration_secs': 0.199912} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.643290] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 35.078s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.645249] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 770.646493] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 770.646493] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dad83c09-23d7-4225-823e-1782a3736175 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.708972] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239965, 'name': ReconfigVM_Task, 'duration_secs': 1.278457} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.709732] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.709732] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f81dbcfd-d3df-4ad9-8c37-37d8f7962f2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.718311] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 770.718311] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 770.718311] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Deleting the datastore file [datastore2] 627fb348-1749-4480-97b9-b479a182d4ee {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 770.718966] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-678928ef-b13b-44d9-9642-8516a961c7cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.720955] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 770.720955] env[68674]: value = "task-3239970" [ 770.720955] env[68674]: _type = "Task" [ 770.720955] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.721941] env[68674]: INFO nova.scheduler.client.report [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Deleted allocations for instance e75d2bc7-f356-4443-9641-d9ebf35843cd [ 770.732493] env[68674]: DEBUG oslo_vmware.api [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for the task: (returnval){ [ 770.732493] env[68674]: value = "task-3239971" [ 770.732493] env[68674]: _type = "Task" [ 770.732493] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.738079] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.743313] env[68674]: DEBUG oslo_vmware.api [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.842502] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d900f6-78d8-39bc-cf58-362c9d0eabce, 'name': SearchDatastore_Task, 'duration_secs': 0.009728} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.842920] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.843266] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/f029042f-d80b-453e-adc9-1e65d7da7aaf.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 770.843557] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9eb80ab2-e155-40e4-a776-de1d4d1e57c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.849885] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 770.849885] env[68674]: value = "task-3239972" [ 770.849885] env[68674]: _type = "Task" [ 770.849885] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.857893] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239972, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.917938] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 770.918316] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-91b7553a-cfa9-4362-9854-e7e1ef9da388 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.927109] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 770.927109] env[68674]: value = "task-3239973" [ 770.927109] env[68674]: _type = "Task" [ 770.927109] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.936942] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239973, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.141320] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239968, 'name': CreateSnapshot_Task, 'duration_secs': 0.71055} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.141697] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 771.143081] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff91fce1-13b0-4c0d-ad56-cb9ba81c65e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.234997] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e5d9b7e7-2602-4cfb-973d-8ba894264e4c tempest-ListServerFiltersTestJSON-876714713 tempest-ListServerFiltersTestJSON-876714713-project-member] Lock "e75d2bc7-f356-4443-9641-d9ebf35843cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.942s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.243905] env[68674]: DEBUG oslo_vmware.api [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239970, 'name': PowerOnVM_Task, 'duration_secs': 0.508885} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.244908] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.248116] env[68674]: DEBUG nova.compute.manager [None req-7e722db8-dc05-4160-9708-5a453f97a3b9 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 771.249018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732dde5b-78a8-4428-a9b3-d767c7f4f66e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.258542] env[68674]: DEBUG oslo_vmware.api [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Task: {'id': task-3239971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145863} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.259639] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 771.259929] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 771.260058] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.260250] env[68674]: INFO nova.compute.manager [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Took 1.15 seconds to destroy the instance on the hypervisor. [ 771.260527] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.260774] env[68674]: DEBUG nova.compute.manager [-] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 771.260886] env[68674]: DEBUG nova.network.neutron [-] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.364572] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239972, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.441688] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239973, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.670027] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 771.673501] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-190b499f-0736-49a3-9e3a-7dd09a18f94f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.682847] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 771.682847] env[68674]: value = "task-3239974" [ 771.682847] env[68674]: _type = "Task" [ 771.682847] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.693187] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239974, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.832415] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d07223d-a34a-4a7f-a7ee-dfa9a74f4697 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.842443] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89565ea-efc0-41d3-865b-c30c30c364a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.877978] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43645431-6c1b-4a0e-91d3-324b9e9085eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.886701] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531303} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.888869] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/f029042f-d80b-453e-adc9-1e65d7da7aaf.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 771.889115] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 771.889435] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd5eb20f-2489-4eac-8124-9e594c2c4ec5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.892206] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5846afe4-3ccb-446a-a240-2ba7253a5ae3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.916360] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 771.916360] env[68674]: value = "task-3239975" [ 771.916360] env[68674]: _type = "Task" [ 771.916360] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.917756] env[68674]: DEBUG nova.compute.provider_tree [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.930776] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.940687] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239973, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.008240] env[68674]: DEBUG nova.compute.manager [req-b12adde5-426f-4716-8512-35e786a2dcc4 req-38a815ce-f300-477a-a01c-a76035695093 service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Received event network-vif-deleted-07dd5c65-5707-47fd-baf2-f100c76d737f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.008621] env[68674]: INFO nova.compute.manager [req-b12adde5-426f-4716-8512-35e786a2dcc4 req-38a815ce-f300-477a-a01c-a76035695093 service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Neutron deleted interface 07dd5c65-5707-47fd-baf2-f100c76d737f; detaching it from the instance and deleting it from the info cache [ 772.008621] env[68674]: DEBUG nova.network.neutron [req-b12adde5-426f-4716-8512-35e786a2dcc4 req-38a815ce-f300-477a-a01c-a76035695093 service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.193775] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239974, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.323762] env[68674]: DEBUG nova.network.neutron [-] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.434652] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082989} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.438033] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 772.438842] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fd8df5-5353-45d6-bc19-08aa3ecc29ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.447995] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239973, 'name': CloneVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.470127] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/f029042f-d80b-453e-adc9-1e65d7da7aaf.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 772.470848] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b37eecb5-654d-48a0-b286-c6184eeb8cd6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.498997] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 772.498997] env[68674]: value = "task-3239976" [ 772.498997] env[68674]: _type = "Task" [ 772.498997] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.500161] env[68674]: DEBUG nova.scheduler.client.report [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 772.500452] env[68674]: DEBUG nova.compute.provider_tree [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 78 to 79 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 772.500724] env[68674]: DEBUG nova.compute.provider_tree [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 772.513748] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239976, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.514092] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-863b7725-23c7-444a-a25d-ae0893cc4d64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.524901] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1f0d9a-c725-4548-bbeb-33c979d158f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.564907] env[68674]: DEBUG nova.compute.manager [req-b12adde5-426f-4716-8512-35e786a2dcc4 req-38a815ce-f300-477a-a01c-a76035695093 service nova] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Detach interface failed, port_id=07dd5c65-5707-47fd-baf2-f100c76d737f, reason: Instance 627fb348-1749-4480-97b9-b479a182d4ee could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 772.696320] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239974, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.827885] env[68674]: INFO nova.compute.manager [-] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Took 1.57 seconds to deallocate network for instance. [ 772.945356] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239973, 'name': CloneVM_Task, 'duration_secs': 1.544652} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.945737] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Created linked-clone VM from snapshot [ 772.946675] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2666c4db-fcb4-45b9-9f40-50e10e3038bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.954523] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Uploading image 8dd8deab-1e1d-41ab-84e6-385a2c096d5f {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 772.979726] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 772.979726] env[68674]: value = "vm-647541" [ 772.979726] env[68674]: _type = "VirtualMachine" [ 772.979726] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 772.980168] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2cdfbb53-b6fb-4fc1-b606-903aa55689dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.987651] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lease: (returnval){ [ 772.987651] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8ae8-5223-a6f0-b9cf-c85985090623" [ 772.987651] env[68674]: _type = "HttpNfcLease" [ 772.987651] env[68674]: } obtained for exporting VM: (result){ [ 772.987651] env[68674]: value = "vm-647541" [ 772.987651] env[68674]: _type = "VirtualMachine" [ 772.987651] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 772.987969] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the lease: (returnval){ [ 772.987969] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8ae8-5223-a6f0-b9cf-c85985090623" [ 772.987969] env[68674]: _type = "HttpNfcLease" [ 772.987969] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 772.995014] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 772.995014] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8ae8-5223-a6f0-b9cf-c85985090623" [ 772.995014] env[68674]: _type = "HttpNfcLease" [ 772.995014] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 773.018727] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239976, 'name': ReconfigVM_Task, 'duration_secs': 0.309265} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.018727] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Reconfigured VM instance instance-00000035 to attach disk [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/f029042f-d80b-453e-adc9-1e65d7da7aaf.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 773.018943] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2e60b8f-562a-41f7-b233-4cef56bf8a65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.026870] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 773.026870] env[68674]: value = "task-3239978" [ 773.026870] env[68674]: _type = "Task" [ 773.026870] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.038102] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239978, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.195610] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239974, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.338466] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.351604] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.351604] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.351792] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "503e9328-bbd8-414f-8bea-250ed8247d67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.351906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "503e9328-bbd8-414f-8bea-250ed8247d67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.355283] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "503e9328-bbd8-414f-8bea-250ed8247d67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.355283] env[68674]: INFO nova.compute.manager [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Terminating instance [ 773.497015] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 773.497015] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8ae8-5223-a6f0-b9cf-c85985090623" [ 773.497015] env[68674]: _type = "HttpNfcLease" [ 773.497015] env[68674]: } is ready. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 773.497383] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 773.497383] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8ae8-5223-a6f0-b9cf-c85985090623" [ 773.497383] env[68674]: _type = "HttpNfcLease" [ 773.497383] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 773.498356] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecdb121-1d62-4580-9f4e-4795303e2eca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.505961] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52500686-3daf-f132-58af-e0d0971456c6/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 773.506183] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52500686-3daf-f132-58af-e0d0971456c6/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 773.563815] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.920s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.568994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.287s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.569102] env[68674]: DEBUG nova.objects.instance [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lazy-loading 'resources' on Instance uuid 3c8459db-cc54-4644-8e4c-83c87017a186 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 773.576441] env[68674]: DEBUG nova.compute.manager [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.576662] env[68674]: DEBUG nova.compute.manager [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d 
service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing instance network info cache due to event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 773.576879] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.577034] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.577196] env[68674]: DEBUG nova.network.neutron [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.586333] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239978, 'name': Rename_Task, 'duration_secs': 0.155462} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.586611] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 773.586856] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a0e344d-361d-473d-b44e-17a74b991d98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.595650] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 773.595650] env[68674]: value = "task-3239979" [ 773.595650] env[68674]: _type = "Task" [ 773.595650] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.605027] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239979, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.623292] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5503829-dd0b-4172-bc37-c225347287b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.696337] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239974, 'name': CloneVM_Task, 'duration_secs': 1.567828} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.696616] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Created linked-clone VM from snapshot [ 773.697457] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb71e05e-dc71-470d-96fe-fb225208cd81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.705828] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Uploading image 433cc68d-3aca-4257-a6f3-9e948eec17f6 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 773.858210] env[68674]: DEBUG nova.compute.manager [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 773.858517] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.859599] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5320bdf-22a5-4a46-9627-0ebd5549a315 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.869950] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.870198] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e3a4a99-e501-474c-bc96-d88a821f5919 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.879818] env[68674]: DEBUG oslo_vmware.api [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 773.879818] env[68674]: value = "task-3239980" [ 773.879818] env[68674]: _type = "Task" [ 773.879818] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.884708] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 773.884981] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-486286a8-533e-4de7-bfb9-c54a5e5b067e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.893671] env[68674]: DEBUG oslo_vmware.api [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239980, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.895311] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 773.895311] env[68674]: value = "task-3239981" [ 773.895311] env[68674]: _type = "Task" [ 773.895311] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.108270] env[68674]: DEBUG oslo_vmware.api [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3239979, 'name': PowerOnVM_Task, 'duration_secs': 0.508515} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.108490] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 774.108832] env[68674]: INFO nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Took 9.81 seconds to spawn the instance on the hypervisor. [ 774.109043] env[68674]: DEBUG nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 774.110068] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d0edd4-9774-4ccb-82df-56e29964df32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.219191] env[68674]: INFO nova.scheduler.client.report [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleted allocation for migration 21a51122-368f-4c4d-9f78-bddd3b48ff9c [ 774.393999] env[68674]: DEBUG oslo_vmware.api [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239980, 'name': PowerOffVM_Task, 'duration_secs': 0.315213} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.397632] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 774.397930] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 774.402418] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e508c718-7b1c-4b1b-bde3-354d77b247fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.420701] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239981, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.462103] env[68674]: DEBUG nova.network.neutron [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updated VIF entry in instance network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.462995] env[68674]: DEBUG nova.network.neutron [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.492850] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 774.493185] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 774.493384] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Deleting the datastore file [datastore2] 503e9328-bbd8-414f-8bea-250ed8247d67 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.494122] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01d8eac6-aca6-499c-bbf9-869106bf4fda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.506036] env[68674]: DEBUG oslo_vmware.api [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for the task: (returnval){ [ 774.506036] env[68674]: value = "task-3239983" [ 774.506036] env[68674]: _type = "Task" [ 774.506036] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.516474] env[68674]: DEBUG oslo_vmware.api [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239983, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.630473] env[68674]: INFO nova.compute.manager [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Took 44.93 seconds to build instance. [ 774.738072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ba0be2de-5174-4a03-a086-9e1cb84f7775 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 43.069s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.842325] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6566ab92-0d5a-4377-acca-f1f8265bfef5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.844934] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-045e54ff-9e2c-4b04-afac-34cb6580cb2c-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.845290] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-045e54ff-9e2c-4b04-afac-34cb6580cb2c-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.845841] env[68674]: DEBUG nova.objects.instance [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'flavor' on Instance uuid 045e54ff-9e2c-4b04-afac-34cb6580cb2c {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.852888] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ea5e97-59ba-45c6-aad6-294ed1667066 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.887062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75c6577-a847-4452-908a-04cb68d84c6d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.896641] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5809fc-9e63-4120-9ff9-55f55780e713 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.918093] env[68674]: DEBUG nova.compute.provider_tree [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.924946] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239981, 'name': Destroy_Task, 'duration_secs': 0.562209} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.925536] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Destroyed the VM [ 774.925960] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 774.926257] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1cd06c3b-79c7-4a4c-9f1e-fb0c282bd489 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.934450] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 774.934450] env[68674]: value = "task-3239984" [ 774.934450] env[68674]: _type = "Task" [ 774.934450] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.945082] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239984, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.966107] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ac1b873-80bc-4260-9517-21a9e855f238 req-fa8595bf-bb84-4929-8aef-1f5dbe8a5b3d service nova] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.017832] env[68674]: DEBUG oslo_vmware.api [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Task: {'id': task-3239983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300112} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.018509] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.018874] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 775.019252] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.019645] env[68674]: INFO nova.compute.manager [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Took 1.16 seconds to destroy the instance on the hypervisor. [ 775.020301] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.024351] env[68674]: DEBUG nova.compute.manager [-] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 775.024351] env[68674]: DEBUG nova.network.neutron [-] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.133112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29274236-6f58-44e6-97d3-6a0f5b3bdc51 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 94.086s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.421877] env[68674]: DEBUG nova.scheduler.client.report [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.451777] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239984, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.453341] env[68674]: DEBUG nova.objects.instance [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'pci_requests' on Instance uuid 045e54ff-9e2c-4b04-afac-34cb6580cb2c {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 775.929325] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.360s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.933045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.419s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.933467] env[68674]: DEBUG nova.objects.instance [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lazy-loading 'resources' on Instance uuid 357b515d-ef37-4688-969e-f894be30edb7 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 775.946465] env[68674]: DEBUG oslo_vmware.api [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239984, 'name': RemoveSnapshot_Task, 'duration_secs': 0.963016} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.947516] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 775.957688] env[68674]: DEBUG nova.objects.base [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Object Instance<045e54ff-9e2c-4b04-afac-34cb6580cb2c> lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 775.957911] env[68674]: DEBUG nova.network.neutron [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.990432] env[68674]: INFO nova.scheduler.client.report [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Deleted allocations for instance 3c8459db-cc54-4644-8e4c-83c87017a186 [ 776.010937] env[68674]: DEBUG nova.policy [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 776.098225] env[68674]: DEBUG nova.network.neutron [-] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.357770] env[68674]: DEBUG nova.network.neutron [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Successfully created port: 09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.452547] env[68674]: WARNING nova.compute.manager [None req-716da29d-dc38-425e-8baa-f65dd4405d12 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Image not found during snapshot: nova.exception.ImageNotFound: Image 433cc68d-3aca-4257-a6f3-9e948eec17f6 could not be found. 
[ 776.502585] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ce387fe-db22-4619-9fd2-847beedf68df tempest-AttachInterfacesV270Test-790807060 tempest-AttachInterfacesV270Test-790807060-project-member] Lock "3c8459db-cc54-4644-8e4c-83c87017a186" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.820s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.601445] env[68674]: INFO nova.compute.manager [-] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Took 1.58 seconds to deallocate network for instance. [ 776.898670] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99efd149-0252-43cf-9eed-10cc7e409f22 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.907590] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da628fa5-836c-4370-84bb-b68ea0d0f661 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.941367] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f1f1ea-0e6c-41d9-8c62-61a46c55dde3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.950041] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc02ff1b-a295-47bd-b6c1-c7c540dc4caf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.964906] env[68674]: DEBUG nova.compute.provider_tree [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.108953] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.468733] env[68674]: DEBUG nova.scheduler.client.report [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.923021] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting lease state for 
https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a36860-cd84-a298-ea72-94546584cc06/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 777.923021] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634683c8-dd7f-4e00-ab72-d57f01a1041f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.928424] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a36860-cd84-a298-ea72-94546584cc06/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 777.928564] env[68674]: ERROR oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a36860-cd84-a298-ea72-94546584cc06/disk-0.vmdk due to incomplete transfer. [ 777.928804] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b7eb7c34-18a8-4903-a95f-55e01eb37d9d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.935915] env[68674]: DEBUG oslo_vmware.rw_handles [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a36860-cd84-a298-ea72-94546584cc06/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 777.936062] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Uploaded image cfe66e69-342f-49f5-a686-4971a7ab072c to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 777.938224] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 777.938494] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b559edc8-ab62-49f9-a34d-ba42bc042321 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.945166] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 777.945166] env[68674]: value = "task-3239985" [ 777.945166] env[68674]: _type = "Task" [ 777.945166] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.954103] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239985, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.974011] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.976744] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.926s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.978359] env[68674]: INFO nova.compute.claims [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.030016] env[68674]: INFO nova.scheduler.client.report [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Deleted allocations for instance 357b515d-ef37-4688-969e-f894be30edb7 [ 778.226894] env[68674]: DEBUG nova.network.neutron [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Successfully updated port: 09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.458637] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239985, 'name': Destroy_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.541191] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc9f101a-c15b-4bc8-8ab4-e5f017a6e495 tempest-FloatingIPsAssociationTestJSON-1134619372 tempest-FloatingIPsAssociationTestJSON-1134619372-project-member] Lock "357b515d-ef37-4688-969e-f894be30edb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.408s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.731999] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.732220] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.732407] env[68674]: DEBUG nova.network.neutron [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.955691] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239985, 'name': Destroy_Task, 'duration_secs': 0.603807} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.956503] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroyed the VM [ 778.956872] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 778.957218] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-631039da-2dd4-4ab4-879c-166bf7d7d34c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.964593] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 778.964593] env[68674]: value = "task-3239986" [ 778.964593] env[68674]: _type = "Task" [ 778.964593] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.973429] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239986, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.295086] env[68674]: WARNING nova.network.neutron [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] a803f1d7-ea36-4d0a-9a85-9b7a8d27f698 already exists in list: networks containing: ['a803f1d7-ea36-4d0a-9a85-9b7a8d27f698']. ignoring it [ 779.350141] env[68674]: DEBUG nova.compute.manager [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.350141] env[68674]: DEBUG nova.compute.manager [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing instance network info cache due to event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 779.350141] env[68674]: DEBUG oslo_concurrency.lockutils [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.350141] env[68674]: DEBUG oslo_concurrency.lockutils [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.350141] env[68674]: DEBUG nova.network.neutron [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.375640] env[68674]: DEBUG nova.compute.manager [req-ae761203-92dd-4cec-b8af-95b37ab722ae req-96f3975d-c857-43af-929f-09a7ea9e1234 service nova] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Received event network-vif-deleted-b9f6f4d8-6ea5-4035-a9e6-ed1462036f63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.475467] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239986, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.537240] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1756092f-d046-4181-82c8-a493f2480fc5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.545074] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa14e78-e665-44df-91bf-17a190ce2f38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.581066] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1a470e-83ea-44fc-bcf8-30a685202424 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.589349] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e657bcd9-f686-44b0-9d2b-b68ecaf3a8e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.608440] env[68674]: DEBUG nova.compute.provider_tree [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.719236] env[68674]: DEBUG nova.network.neutron [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "09fb1c43-275f-4342-a947-5a8d1f977597", "address": "fa:16:3e:88:bd:15", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09fb1c43-27", "ovs_interfaceid": "09fb1c43-275f-4342-a947-5a8d1f977597", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.978066] env[68674]: DEBUG oslo_vmware.api [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239986, 'name': RemoveSnapshot_Task, 'duration_secs': 0.622457} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.978420] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 779.978677] env[68674]: INFO nova.compute.manager [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 15.90 seconds to snapshot the instance on the hypervisor. [ 780.114000] env[68674]: DEBUG nova.scheduler.client.report [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.181350] env[68674]: DEBUG nova.network.neutron [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updated VIF entry in instance network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.181851] env[68674]: DEBUG nova.network.neutron [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.221908] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.222881] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.222881] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.223775] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34133f09-e037-460b-af20-b91ebd9dbb06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.243362] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 780.243694] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.243760] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.243949] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.248210] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.248433] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 780.248664] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 780.248831] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 780.249014] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 780.249186] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] 
{{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 780.249358] env[68674]: DEBUG nova.virt.hardware [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 780.257427] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Reconfiguring VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 780.257846] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d1fc7b2-f756-42c1-98bc-63f9aed9bfe9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.278820] env[68674]: DEBUG oslo_vmware.api [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 780.278820] env[68674]: value = "task-3239987" [ 780.278820] env[68674]: _type = "Task" [ 780.278820] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.288598] env[68674]: DEBUG oslo_vmware.api [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239987, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.553187] env[68674]: DEBUG nova.compute.manager [None req-bf8c29a6-d3a3-4580-a7b5-46bd335b2b7b tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Found 1 images (rotation: 2) {{(pid=68674) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 780.618899] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.642s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.620991] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.625048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.944s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.629442] env[68674]: INFO nova.compute.claims [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.684512] env[68674]: DEBUG oslo_concurrency.lockutils [req-8439f244-895e-4ae5-99b1-39e2c373be18 req-51d90faa-239c-4b04-b575-16c059dfa544 service nova] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.793624] env[68674]: DEBUG oslo_vmware.api [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.136629] env[68674]: DEBUG nova.compute.utils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.138077] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.138258] env[68674]: DEBUG nova.network.neutron [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.218049] env[68674]: DEBUG nova.policy [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bc8df4f271e4330b3874e04b792a537', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a84d9d6e23bd40049c34e6f087252b4e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.290637] env[68674]: DEBUG oslo_vmware.api [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239987, 'name': ReconfigVM_Task, 'duration_secs': 0.758857} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.292773] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.293111] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Reconfigured VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 781.647282] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.757173] env[68674]: DEBUG nova.network.neutron [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Successfully created port: c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.802461] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9a12dfde-448f-463b-9e0e-da1182ca713a tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-045e54ff-9e2c-4b04-afac-34cb6580cb2c-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.957s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.178511] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52500686-3daf-f132-58af-e0d0971456c6/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 782.179749] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93391705-3e8f-4865-b722-e7ae4ec3af02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.190545] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52500686-3daf-f132-58af-e0d0971456c6/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 782.190545] env[68674]: ERROR oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52500686-3daf-f132-58af-e0d0971456c6/disk-0.vmdk due to incomplete transfer. [ 782.190785] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a8ba3c23-e271-411e-ba96-9698a7814447 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.198726] env[68674]: DEBUG oslo_vmware.rw_handles [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52500686-3daf-f132-58af-e0d0971456c6/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 782.198939] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Uploaded image 8dd8deab-1e1d-41ab-84e6-385a2c096d5f to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 782.200870] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 782.203573] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a8fae183-fbf0-4f6d-9e1b-b0831158e236 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.210220] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 782.210220] env[68674]: value = "task-3239988" [ 782.210220] env[68674]: _type = "Task" [ 782.210220] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.223863] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239988, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.270821] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2964d79-82b4-41ff-8f77-c970b815dacb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.280377] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a66d19-261a-4e6b-b9ae-c2aefe1a808d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.316306] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1ab74c-3154-4996-9cd2-1050b6b4f1f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.324566] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c957062a-b9ff-45a3-9ce3-923786ebae7c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.340725] env[68674]: DEBUG nova.compute.provider_tree [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.658789] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.702753] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.703198] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.703443] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.703656] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.703819] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.703983] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.704590] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 782.704860] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 782.705148] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.705408] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.705680] env[68674]: DEBUG nova.virt.hardware [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.706654] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65da24ee-c349-4772-8311-70169fe09c86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.717920] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcb9e52-8db7-4de1-92b6-8f35eb4488ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.725433] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239988, 'name': Destroy_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.735573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.735742] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.736045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.736294] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.736519] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.740419] env[68674]: INFO nova.compute.manager [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Terminating instance [ 782.849310] env[68674]: DEBUG nova.scheduler.client.report [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.980429] env[68674]: 
DEBUG nova.compute.manager [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 782.980699] env[68674]: DEBUG nova.compute.manager [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing instance network info cache due to event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 782.980934] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.981239] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.981437] env[68674]: DEBUG nova.network.neutron [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.003450] env[68674]: DEBUG nova.compute.manager [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-vif-plugged-09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.003669] env[68674]: DEBUG oslo_concurrency.lockutils [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.003892] env[68674]: DEBUG oslo_concurrency.lockutils [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.004105] env[68674]: DEBUG oslo_concurrency.lockutils [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.004219] env[68674]: DEBUG nova.compute.manager [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] No waiting events found dispatching 
network-vif-plugged-09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 783.004374] env[68674]: WARNING nova.compute.manager [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received unexpected event network-vif-plugged-09fb1c43-275f-4342-a947-5a8d1f977597 for instance with vm_state active and task_state None. [ 783.004533] env[68674]: DEBUG nova.compute.manager [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-changed-09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.004687] env[68674]: DEBUG nova.compute.manager [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Refreshing instance network info cache due to event network-changed-09fb1c43-275f-4342-a947-5a8d1f977597. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.004868] env[68674]: DEBUG oslo_concurrency.lockutils [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] Acquiring lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.005016] env[68674]: DEBUG oslo_concurrency.lockutils [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] Acquired lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.005178] env[68674]: DEBUG nova.network.neutron [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Refreshing network info cache for port 09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.221084] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239988, 'name': Destroy_Task, 'duration_secs': 0.663996} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.221479] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Destroyed the VM [ 783.222483] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 783.222818] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-14bf1e32-4226-4e69-baa7-1e5f45ae763e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.229970] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 783.229970] env[68674]: value = "task-3239989" [ 783.229970] env[68674]: _type = "Task" [ 783.229970] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.238403] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239989, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.247650] env[68674]: DEBUG nova.compute.manager [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 783.247650] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.247650] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f849a3a-d109-4abc-9ced-6d0b779ec566 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.256383] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.256667] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c100c335-9eaa-47a6-aaa2-2aac8f85a227 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.264789] env[68674]: DEBUG oslo_vmware.api [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 783.264789] env[68674]: value = "task-3239990" [ 783.264789] env[68674]: _type = "Task" [ 783.264789] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.280822] env[68674]: DEBUG oslo_vmware.api [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.355064] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.357747] env[68674]: DEBUG nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 783.360946] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.402s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.361229] env[68674]: DEBUG nova.objects.instance [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lazy-loading 'resources' on Instance uuid 6278d756-139c-4fcd-bf31-304c978d6682 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.739492] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239989, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.744081] env[68674]: DEBUG nova.compute.manager [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.744902] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9489481-9bd4-49d8-afd8-c815f36eaac3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.774711] env[68674]: DEBUG oslo_vmware.api [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239990, 'name': PowerOffVM_Task, 'duration_secs': 0.345909} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.774976] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 783.775166] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 783.775416] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6607ed2-fa8b-45cd-bbcb-50ac4dbc6fe8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.864491] env[68674]: DEBUG nova.compute.utils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 783.869526] env[68674]: DEBUG nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 783.869699] env[68674]: DEBUG nova.network.neutron [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.988614] env[68674]: DEBUG nova.policy [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1891413e35f845a2b761f474df3eb6c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dceab4b22c34737bc85ee5a5ded00d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 784.128332] env[68674]: DEBUG nova.network.neutron [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Successfully updated port: c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.183828] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Unregistered the VM {{(pid=68674) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.184078] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.184272] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleting the datastore file [datastore2] 55727bbc-6b65-4e4c-ba4f-8240efbf052a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.184563] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edaaaee3-7263-4b42-b656-01d3670feb29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.194622] env[68674]: DEBUG oslo_vmware.api [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 784.194622] env[68674]: value = "task-3239992" [ 784.194622] env[68674]: _type = "Task" [ 784.194622] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.203699] env[68674]: DEBUG oslo_vmware.api [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.212459] env[68674]: DEBUG nova.network.neutron [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updated VIF entry in instance network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.212864] env[68674]: DEBUG nova.network.neutron [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.242440] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239989, 'name': RemoveSnapshot_Task} progress is 31%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.249730] env[68674]: DEBUG nova.network.neutron [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updated VIF entry in instance network info cache for port 09fb1c43-275f-4342-a947-5a8d1f977597. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.250157] env[68674]: DEBUG nova.network.neutron [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "09fb1c43-275f-4342-a947-5a8d1f977597", "address": "fa:16:3e:88:bd:15", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09fb1c43-27", "ovs_interfaceid": "09fb1c43-275f-4342-a947-5a8d1f977597", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.254480] env[68674]: INFO nova.compute.manager [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] instance snapshotting [ 784.255065] env[68674]: DEBUG nova.objects.instance [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'flavor' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.373957] env[68674]: DEBUG nova.compute.manager [None 
req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 784.380129] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3496670b-8e74-46a3-8c5b-1521a9c3120a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.388187] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89cc53c-9010-4b86-9475-da5307dba059 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.424254] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0699a5d-18ef-4989-bb48-6c7507a9bdbe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.432347] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9724671-5167-47f1-926d-1daf1a51be97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.453429] env[68674]: DEBUG nova.compute.provider_tree [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.637072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.637072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.637072] env[68674]: DEBUG nova.network.neutron [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.689773] env[68674]: DEBUG nova.network.neutron [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Successfully created port: 9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.729176] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b95e29c-a4c5-497f-afc5-f5007d237f0e req-88343086-e989-4a5b-b2e4-b9834f9625e4 service nova] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.729722] env[68674]: DEBUG oslo_vmware.api [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3239992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.463005} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.730346] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.731595] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.731595] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.731595] env[68674]: INFO nova.compute.manager [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Took 1.49 seconds to destroy the instance on the hypervisor. [ 784.731595] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.731863] env[68674]: DEBUG nova.compute.manager [-] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.739170] env[68674]: DEBUG nova.network.neutron [-] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.749992] env[68674]: DEBUG oslo_vmware.api [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3239989, 'name': RemoveSnapshot_Task, 'duration_secs': 1.252129} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.750285] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 784.750497] env[68674]: INFO nova.compute.manager [None req-a81b8158-4378-4f5a-8210-a7d6c8a85bef tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Took 16.45 seconds to snapshot the instance on the hypervisor. [ 784.753566] env[68674]: DEBUG oslo_concurrency.lockutils [req-c4845823-86ac-4ac7-8f45-4625884bdaf0 req-df80a61f-7d91-4395-9a49-2921af37cbe4 service nova] Releasing lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.763539] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81a0f59-d818-453c-b750-8b0753dd1d41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.785798] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a079c7db-db3a-412a-badb-d0834b187c85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.963661] env[68674]: DEBUG nova.scheduler.client.report [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.175645] env[68674]: DEBUG nova.network.neutron [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.270415] env[68674]: DEBUG nova.compute.manager [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.270683] env[68674]: DEBUG nova.compute.manager [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing instance network info cache due to event network-changed-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.270830] env[68674]: DEBUG oslo_concurrency.lockutils [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] Acquiring lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.270973] env[68674]: DEBUG oslo_concurrency.lockutils [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] Acquired lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.271156] env[68674]: DEBUG nova.network.neutron [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Refreshing network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.295468] env[68674]: DEBUG nova.compute.manager [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Received event network-vif-plugged-c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.295697] env[68674]: DEBUG oslo_concurrency.lockutils [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] Acquiring lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.295912] env[68674]: DEBUG oslo_concurrency.lockutils [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.296095] env[68674]: DEBUG oslo_concurrency.lockutils [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.296269] env[68674]: DEBUG nova.compute.manager [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] No waiting events found dispatching network-vif-plugged-c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 785.296436] env[68674]: WARNING nova.compute.manager [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Received unexpected event network-vif-plugged-c9e9cacf-59be-4854-a352-111921b372e2 for instance with vm_state building and task_state spawning. 
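The records above trace how Nova serializes external Neutron events per instance: each network-vif-plugged / network-changed event is dispatched while holding a named lock of the form "<instance-uuid>-events", and the "Acquiring lock ... / acquired ... waited 0.000s / released ... held 0.000s" lines come from the inner wrapper in oslo_concurrency/lockutils.py cited in those records. A minimal, self-contained sketch of that locking pattern follows, using only the public lockutils.synchronized decorator; the UUID is a placeholder, not one taken from this log, and the function body is illustrative rather than Nova's actual _pop_event implementation.

    # Sketch of the per-instance "-events" lock pattern seen in the records above.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder UUID

    @lockutils.synchronized(INSTANCE_UUID + "-events")
    def _pop_event():
        # Runs only while the per-instance "-events" lock is held; lockutils
        # logs the acquire / waited / released / held timings at DEBUG level,
        # which is what produces the lock lines in the surrounding records.
        return None

    _pop_event()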
[ 785.296608] env[68674]: DEBUG nova.compute.manager [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Received event network-changed-c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.296747] env[68674]: DEBUG nova.compute.manager [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Refreshing instance network info cache due to event network-changed-c9e9cacf-59be-4854-a352-111921b372e2. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.296912] env[68674]: DEBUG oslo_concurrency.lockutils [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] Acquiring lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.298266] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 785.298563] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-149b7ae2-8970-40eb-b82c-3e312601b03a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.312124] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 785.312124] env[68674]: value = "task-3239993" [ 785.312124] env[68674]: _type = "Task" [ 785.312124] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.318251] env[68674]: DEBUG nova.network.neutron [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Updating instance_info_cache with network_info: [{"id": "c9e9cacf-59be-4854-a352-111921b372e2", "address": "fa:16:3e:d1:a3:98", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e9cacf-59", "ovs_interfaceid": "c9e9cacf-59be-4854-a352-111921b372e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.325674] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239993, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.383520] env[68674]: DEBUG nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 785.407801] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 785.408013] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.408192] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.408396] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.408549] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.408765] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 785.408944] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 785.409190] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 785.409390] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Got 1 possible 
topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 785.409578] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 785.409787] env[68674]: DEBUG nova.virt.hardware [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 785.410653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7253aa-bf11-43a6-8ef6-7a28cdc03bbd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.420282] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb115d90-d480-4091-bc32-69e12946e40a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.470135] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.473786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.113s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.475349] env[68674]: INFO nova.compute.claims [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.556376] env[68674]: INFO nova.scheduler.client.report [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Deleted allocations for instance 6278d756-139c-4fcd-bf31-304c978d6682 [ 785.659860] env[68674]: DEBUG nova.network.neutron [-] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.755144] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.755446] env[68674]: DEBUG 
oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.755683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.755875] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.756148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.758345] env[68674]: INFO nova.compute.manager [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Terminating instance [ 785.824532] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239993, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.827118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.827438] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Instance network_info: |[{"id": "c9e9cacf-59be-4854-a352-111921b372e2", "address": "fa:16:3e:d1:a3:98", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e9cacf-59", "ovs_interfaceid": "c9e9cacf-59be-4854-a352-111921b372e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 785.827727] env[68674]: DEBUG oslo_concurrency.lockutils [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] Acquired lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.827908] env[68674]: DEBUG nova.network.neutron [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Refreshing network info cache for port c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.829089] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:a3:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9e9cacf-59be-4854-a352-111921b372e2', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.836954] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 
tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.840122] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.840753] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-307de412-b66f-440a-849d-6a10eaa507a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.856541] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-045e54ff-9e2c-4b04-afac-34cb6580cb2c-09fb1c43-275f-4342-a947-5a8d1f977597" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.856780] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-045e54ff-9e2c-4b04-afac-34cb6580cb2c-09fb1c43-275f-4342-a947-5a8d1f977597" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.865892] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.865892] env[68674]: value = "task-3239994" [ 785.865892] env[68674]: _type = "Task" [ 785.865892] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.874748] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239994, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.065461] env[68674]: DEBUG oslo_concurrency.lockutils [None req-86981d85-0d69-4cd5-97ed-9d3a45a3bc8d tempest-ServersTestBootFromVolume-2095639387 tempest-ServersTestBootFromVolume-2095639387-project-member] Lock "6278d756-139c-4fcd-bf31-304c978d6682" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.774s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.154222] env[68674]: DEBUG nova.network.neutron [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updated VIF entry in instance network info cache for port 71dd9ef4-7bf9-4f8c-a04c-23431a1d8112. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.154609] env[68674]: DEBUG nova.network.neutron [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [{"id": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "address": "fa:16:3e:f6:e7:1f", "network": {"id": "57a5ae11-4f33-4f74-9756-ee7a71e40eea", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1400887908-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40404e0b9c0042c58bc22c96799709af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604056d6-6dd6-47fa-9eaa-6863a3a7c488", "external-id": "nsx-vlan-transportzone-287", "segmentation_id": 287, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71dd9ef4-7b", "ovs_interfaceid": "71dd9ef4-7bf9-4f8c-a04c-23431a1d8112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.163078] env[68674]: INFO nova.compute.manager [-] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Took 1.43 seconds to deallocate network for instance. [ 786.250931] env[68674]: DEBUG nova.network.neutron [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Updated VIF entry in instance network info cache for port c9e9cacf-59be-4854-a352-111921b372e2. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.251151] env[68674]: DEBUG nova.network.neutron [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Updating instance_info_cache with network_info: [{"id": "c9e9cacf-59be-4854-a352-111921b372e2", "address": "fa:16:3e:d1:a3:98", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e9cacf-59", "ovs_interfaceid": "c9e9cacf-59be-4854-a352-111921b372e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.265682] env[68674]: DEBUG nova.compute.manager [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 786.265912] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.267094] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01354e69-33c6-4bfc-888d-7dff3bbe6e77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.278922] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 786.278922] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e556d94-e253-4e72-bcfc-6b6b49128e3d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.284349] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 786.284349] env[68674]: value = "task-3239995" [ 786.284349] env[68674]: _type = "Task" [ 786.284349] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.299194] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.325016] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239993, 'name': CreateSnapshot_Task, 'duration_secs': 0.627724} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.325539] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 786.327228] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea409b6-9a22-414f-8432-11bd9761cac5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.359565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.359565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.360784] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6bdb90-d875-45e0-9e2b-c5381150642b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.390542] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17447b2c-7fe5-4060-bb47-8f4db91412e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.429779] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Reconfiguring VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 786.429779] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239994, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.429779] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5cd38d4-3a31-4d53-aa17-ce89cb8aa2e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.449609] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 786.449609] env[68674]: value = "task-3239996" [ 786.449609] env[68674]: _type = "Task" [ 786.449609] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.462562] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.616344] env[68674]: DEBUG nova.network.neutron [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Successfully updated port: 9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.660581] env[68674]: DEBUG oslo_concurrency.lockutils [req-661250c5-ec59-4dd2-bb98-81371db0af58 req-55aabf7f-dfc2-4801-8a03-645547fec04b service nova] Releasing lock "refresh_cache-2007222e-e4e5-44b3-bd9e-55b4a2143c3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.672959] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.754620] env[68674]: DEBUG oslo_concurrency.lockutils [req-da72a3ac-8dfe-4856-8f4e-5633f57aad81 req-5fb1f03e-08a3-46ad-a448-f8ff94ea7aec service nova] Releasing lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.801935] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239995, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.848231] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 786.851009] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f8b91d35-1978-4a6d-b93a-b140194cc297 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.859971] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 786.859971] env[68674]: value = "task-3239997" [ 786.859971] env[68674]: _type = "Task" [ 786.859971] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.868150] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239997, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.896547] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3239994, 'name': CreateVM_Task, 'duration_secs': 0.859231} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.896731] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.897497] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.897668] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.898016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 786.898270] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff0a342-d75b-478d-a470-ec0ce661070a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.910643] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 786.910643] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52964a0d-6939-379c-fdda-01352aa6333d" [ 786.910643] env[68674]: _type = "Task" [ 786.910643] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.927079] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.927446] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52964a0d-6939-379c-fdda-01352aa6333d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.927653] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 786.964674] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.032533] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129dd12d-5f9d-4f88-ae72-ad778ae39e98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.040413] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345d0091-0ebe-44fc-8488-9f9e2ea02ef0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.079976] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818dee87-aa30-4692-8479-ad1fc47dc7eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.089147] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41c266e-9435-4b34-9991-d4d27f005c55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.102731] env[68674]: DEBUG nova.compute.provider_tree [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.117897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "refresh_cache-f69c5fcf-6d25-48a5-a154-c3632c76175a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.118066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock 
"refresh_cache-f69c5fcf-6d25-48a5-a154-c3632c76175a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.118224] env[68674]: DEBUG nova.network.neutron [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.199629] env[68674]: DEBUG nova.compute.manager [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 787.200640] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b18a937-b318-45d6-9f53-499426ad5745 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.303600] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239995, 'name': PowerOffVM_Task, 'duration_secs': 0.54098} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.303903] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 787.304274] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 787.304368] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dd8ae12-bf2d-46da-9b85-17b5cfd04c8c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.372439] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239997, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.400148] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 787.400399] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 787.400585] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Deleting the datastore file [datastore2] 2007222e-e4e5-44b3-bd9e-55b4a2143c3e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.400866] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ca4a3c8-6c70-465f-bf90-4f85bb188a8c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.408697] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for the task: (returnval){ [ 787.408697] env[68674]: value = "task-3239999" [ 787.408697] env[68674]: _type = "Task" [ 787.408697] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.424175] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.428081] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52964a0d-6939-379c-fdda-01352aa6333d, 'name': SearchDatastore_Task, 'duration_secs': 0.02805} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.428370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.428603] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.428885] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.428976] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.429219] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.429882] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e56950f4-5d14-47cf-9d14-e187078f512e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.438579] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.439319] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.439319] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.439319] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.439485] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.439578] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.439739] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 787.439892] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.447143] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.448251] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.449138] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e36beb0e-da99-4029-b480-384695edc2cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.458813] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 787.458813] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277cf8a-ed42-bb60-5516-cd28086b685d" [ 787.458813] env[68674]: _type = "Task" [ 787.458813] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.461474] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.472173] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277cf8a-ed42-bb60-5516-cd28086b685d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.507433] env[68674]: DEBUG nova.compute.manager [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Received event network-vif-plugged-9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.507433] env[68674]: DEBUG oslo_concurrency.lockutils [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] Acquiring lock "f69c5fcf-6d25-48a5-a154-c3632c76175a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.507433] env[68674]: DEBUG oslo_concurrency.lockutils [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.507433] env[68674]: DEBUG oslo_concurrency.lockutils [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.507433] env[68674]: DEBUG nova.compute.manager [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] No waiting events found dispatching network-vif-plugged-9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 787.507433] env[68674]: WARNING nova.compute.manager [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Received unexpected event network-vif-plugged-9426039c-799a-4219-8e85-2ab029f56643 for instance with vm_state building and task_state spawning. [ 787.507433] env[68674]: DEBUG nova.compute.manager [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Received event network-changed-9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.507433] env[68674]: DEBUG nova.compute.manager [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Refreshing instance network info cache due to event network-changed-9426039c-799a-4219-8e85-2ab029f56643. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.507433] env[68674]: DEBUG oslo_concurrency.lockutils [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] Acquiring lock "refresh_cache-f69c5fcf-6d25-48a5-a154-c3632c76175a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.520749] env[68674]: DEBUG nova.compute.manager [req-6bd92d5c-50f3-450f-8112-929b1754583c req-115a42d5-ffd7-4fba-b04f-ca002b262f21 service nova] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Received event network-vif-deleted-fe21252e-0622-4932-bd5d-cd1a3e5ecdc4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.606339] env[68674]: DEBUG nova.scheduler.client.report [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.659454] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "7aa58e2f-1202-4252-9c38-ce53084c573f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.659786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.714632] env[68674]: INFO nova.compute.manager [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] instance snapshotting [ 787.721250] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9e2026-c30f-45ef-bc0c-b0f05266b75d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.743851] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da8802e-bd87-44d6-b441-04ca0d4617a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.833874] env[68674]: DEBUG nova.network.neutron [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.873730] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239997, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.928990] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.944592] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.962240] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.971493] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277cf8a-ed42-bb60-5516-cd28086b685d, 'name': SearchDatastore_Task, 'duration_secs': 0.046265} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.972376] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5051ea8f-3876-437d-b410-2d3d88e634a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.978221] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 787.978221] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526c7d38-ed7e-de92-74cb-5c93301b4dda" [ 787.978221] env[68674]: _type = "Task" [ 787.978221] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.991443] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526c7d38-ed7e-de92-74cb-5c93301b4dda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.112405] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.112913] env[68674]: DEBUG nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 788.115735] env[68674]: DEBUG oslo_concurrency.lockutils [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.299s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.162992] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 788.182074] env[68674]: DEBUG nova.network.neutron [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Updating instance_info_cache with network_info: [{"id": "9426039c-799a-4219-8e85-2ab029f56643", "address": "fa:16:3e:f4:44:99", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9426039c-79", "ovs_interfaceid": "9426039c-799a-4219-8e85-2ab029f56643", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.255598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Creating Snapshot of the VM instance {{(pid=68674) 
_create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 788.256018] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b22e18d5-a50f-4f83-8548-d03285672bc8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.267374] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 788.267374] env[68674]: value = "task-3240000" [ 788.267374] env[68674]: _type = "Task" [ 788.267374] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.276948] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240000, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.372107] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239997, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.430999] env[68674]: DEBUG oslo_vmware.api [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Task: {'id': task-3239999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.971332} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.432094] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.432094] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 788.432094] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.434854] env[68674]: INFO nova.compute.manager [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Took 2.17 seconds to destroy the instance on the hypervisor. 
[ 788.434854] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.434854] env[68674]: DEBUG nova.compute.manager [-] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 788.434854] env[68674]: DEBUG nova.network.neutron [-] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.468715] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.489283] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526c7d38-ed7e-de92-74cb-5c93301b4dda, 'name': SearchDatastore_Task, 'duration_secs': 0.014433} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.489574] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.489848] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/5c12cb5d-821c-4e63-86a0-dadc9794a8ba.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.490090] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-630648d0-4d9e-438a-b0be-b12618c7152a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.497750] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 788.497750] env[68674]: value = "task-3240001" [ 788.497750] env[68674]: _type = "Task" [ 788.497750] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.506099] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.625775] env[68674]: DEBUG nova.compute.utils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 788.627406] env[68674]: DEBUG nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 788.685646] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "refresh_cache-f69c5fcf-6d25-48a5-a154-c3632c76175a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.686055] env[68674]: DEBUG nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Instance network_info: |[{"id": "9426039c-799a-4219-8e85-2ab029f56643", "address": "fa:16:3e:f4:44:99", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9426039c-79", "ovs_interfaceid": "9426039c-799a-4219-8e85-2ab029f56643", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 788.686393] env[68674]: DEBUG oslo_concurrency.lockutils [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] Acquired lock "refresh_cache-f69c5fcf-6d25-48a5-a154-c3632c76175a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.686582] env[68674]: DEBUG nova.network.neutron [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: 
f69c5fcf-6d25-48a5-a154-c3632c76175a] Refreshing network info cache for port 9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.688077] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:44:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9426039c-799a-4219-8e85-2ab029f56643', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.705659] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.706825] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.711398] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.711398] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f07cbbc-4930-407a-b2c2-517bdbe532ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.735811] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.735811] env[68674]: value = "task-3240002" [ 788.735811] env[68674]: _type = "Task" [ 788.735811] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.744157] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240002, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.750199] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b50dfda-3d85-4a66-88ea-220775bb2ef7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.757875] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ec6f38-37a9-411c-aab9-be8260ebd721 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.798346] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9978adc3-2fc2-4814-b64d-cec78cdd5bb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.808068] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240000, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.809492] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4574c9a6-b10b-4928-90f0-f691f158cff5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.823847] env[68674]: DEBUG nova.compute.provider_tree [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.877216] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3239997, 'name': CloneVM_Task, 'duration_secs': 1.843915} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.877498] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created linked-clone VM from snapshot [ 788.878520] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34691db-846a-4b1b-b7b2-cda8bd9dec9d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.886421] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Uploading image 662235da-0849-4b73-b3fa-11a761dbe32b {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 788.949586] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 788.949586] env[68674]: value = "vm-647545" [ 788.949586] env[68674]: _type = "VirtualMachine" [ 788.949586] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 788.949586] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-74197c0e-f92d-4633-a06d-6533e2fc3ebd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.957470] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease: (returnval){ [ 788.957470] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f7cf85-e32c-780b-3d19-349384bca709" [ 788.957470] env[68674]: _type = "HttpNfcLease" [ 788.957470] env[68674]: } obtained for exporting VM: (result){ [ 788.957470] env[68674]: value = "vm-647545" [ 788.957470] env[68674]: _type = "VirtualMachine" [ 788.957470] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 788.957754] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the lease: (returnval){ [ 788.957754] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f7cf85-e32c-780b-3d19-349384bca709" [ 788.957754] env[68674]: _type = "HttpNfcLease" [ 788.957754] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 788.971753] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 788.971753] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f7cf85-e32c-780b-3d19-349384bca709" [ 788.971753] env[68674]: _type = "HttpNfcLease" [ 788.971753] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 788.972142] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.007862] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240001, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.133097] env[68674]: DEBUG nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 789.247910] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240002, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.304388] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240000, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.363901] env[68674]: ERROR nova.scheduler.client.report [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [req-563d0cf0-848f-4d69-95f9-19c2eca7593c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-563d0cf0-848f-4d69-95f9-19c2eca7593c"}]}: nova.exception.BuildAbortException: Build of instance 1189fa93-608b-4684-a675-f1caf29a9f43 aborted: Failed to rebuild volume backed instance. 
[ 789.388935] env[68674]: DEBUG nova.scheduler.client.report [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 789.410127] env[68674]: DEBUG nova.scheduler.client.report [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 789.410361] env[68674]: DEBUG nova.compute.provider_tree [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 789.426388] env[68674]: DEBUG nova.scheduler.client.report [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 789.467017] env[68674]: DEBUG nova.scheduler.client.report [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 789.484887] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.486635] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 789.486635] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f7cf85-e32c-780b-3d19-349384bca709" [ 789.486635] env[68674]: _type = "HttpNfcLease" [ 789.486635] env[68674]: } is ready. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 789.486914] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 789.486914] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f7cf85-e32c-780b-3d19-349384bca709" [ 789.486914] env[68674]: _type = "HttpNfcLease" [ 789.486914] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 789.487717] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfb51f9-9a46-4cfc-9c65-0ac1d8b30be1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.495112] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb9e2a-ab3d-1ae4-8b68-ab064cb67908/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 789.495339] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb9e2a-ab3d-1ae4-8b68-ab064cb67908/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 789.573014] env[68674]: DEBUG nova.network.neutron [-] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.581195] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.96803} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.581488] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/5c12cb5d-821c-4e63-86a0-dadc9794a8ba.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.583308] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.583308] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8876a1a9-0fdd-4a9e-ba30-6e1ede7c4766 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.592757] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 789.592757] env[68674]: value = "task-3240004" [ 789.592757] env[68674]: _type = "Task" [ 789.592757] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.600890] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.611787] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b52f7678-1d73-46db-904f-5ff7cd93eebf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.748297] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240002, 'name': CreateVM_Task, 'duration_secs': 0.799826} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.748297] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 789.748632] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.748801] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.749164] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 789.750453] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-666b8f3e-35e0-48f1-9e23-a16f3ae2249c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.755136] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 789.755136] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aac986-851b-6ccc-7f37-43507e429044" [ 789.755136] env[68674]: _type = "Task" [ 789.755136] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.768308] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aac986-851b-6ccc-7f37-43507e429044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.802843] env[68674]: DEBUG nova.compute.manager [req-b84131cf-ef75-4424-b2e3-c9ab40ce4c72 req-a1a2eee2-24c0-4def-8ad1-d6be490b91d9 service nova] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Received event network-vif-deleted-71dd9ef4-7bf9-4f8c-a04c-23431a1d8112 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.806371] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240000, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.829535] env[68674]: DEBUG nova.network.neutron [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Updated VIF entry in instance network info cache for port 9426039c-799a-4219-8e85-2ab029f56643. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.829535] env[68674]: DEBUG nova.network.neutron [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Updating instance_info_cache with network_info: [{"id": "9426039c-799a-4219-8e85-2ab029f56643", "address": "fa:16:3e:f4:44:99", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9426039c-79", "ovs_interfaceid": "9426039c-799a-4219-8e85-2ab029f56643", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.971877] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.077795] env[68674]: INFO nova.compute.manager [-] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Took 1.64 seconds to deallocate network for instance. [ 790.111243] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.26443} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.112315] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.116480] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0361ebe0-79dd-4dd7-9e5d-7f8c1113cd82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.128557] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0821330c-8858-4edf-b85b-9b4530f19dfd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.165106] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cb08aa-f9e0-4d51-a857-6a4d52475e91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.170246] env[68674]: DEBUG nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 790.182904] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/5c12cb5d-821c-4e63-86a0-dadc9794a8ba.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.188261] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81566d35-8dea-4ebb-a3d4-80714d49a01d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.243039] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54896329-0000-4a67-a69c-9e825732e741 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.247206] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 790.247206] env[68674]: value = "task-3240005" [ 790.247206] env[68674]: _type = "Task" [ 790.247206] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.254557] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc47acd2-32bd-4dbb-a0a5-f12a611efe07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.262595] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 790.262595] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.262595] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 790.262595] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.262595] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 790.262595] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 790.262851] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 790.264027] env[68674]: DEBUG nova.virt.hardware [None 
req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 790.264027] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 790.264027] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 790.264027] env[68674]: DEBUG nova.virt.hardware [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 790.270095] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce966ba-f848-4d59-bf17-26b0c6366d4f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.271625] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.289449] env[68674]: DEBUG nova.compute.provider_tree [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.297057] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541e13a2-33d7-44cb-92a0-47e3b05dea76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.302424] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aac986-851b-6ccc-7f37-43507e429044, 'name': SearchDatastore_Task, 'duration_secs': 0.036323} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.304122] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.304525] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.305275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.305456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.305642] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.315415] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3e4b14e-6d00-4dc1-97d1-5daf4d96564f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.329134] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.336566] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Creating folder: Project (e6f29272e3924d999042455aaeef00f8). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.338462] env[68674]: DEBUG oslo_concurrency.lockutils [req-e62c256a-a1fc-4ee7-82b3-cf554f73925d req-1d47b1cc-8ddc-4fa0-932e-8f07d0f8aa4f service nova] Releasing lock "refresh_cache-f69c5fcf-6d25-48a5-a154-c3632c76175a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.339655] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-095e4b97-216c-42b2-8800-4d4d8c917e37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.347569] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240000, 'name': CreateSnapshot_Task, 'duration_secs': 1.85708} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.349732] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 790.350857] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.350857] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.353061] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e1fcd5-554e-4670-b787-fe5040f188da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.360120] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbe2b873-fa14-4565-a1ab-e4fd706e53fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.361810] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Created folder: Project (e6f29272e3924d999042455aaeef00f8) in parent group-v647377. [ 790.362190] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Creating folder: Instances. Parent ref: group-v647548. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.363636] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-369fb0a5-7875-4ea7-928c-558f128bc7b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.372949] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 790.372949] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2dc67-dcb1-1721-99ff-92f89bed2f3a" [ 790.372949] env[68674]: _type = "Task" [ 790.372949] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.386621] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Created folder: Instances in parent group-v647548. [ 790.387295] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.391032] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.391947] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2dc67-dcb1-1721-99ff-92f89bed2f3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.392520] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-523d70a1-dcdc-471b-8920-0478a12c6dba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.416816] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.416816] env[68674]: value = "task-3240008" [ 790.416816] env[68674]: _type = "Task" [ 790.416816] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.427616] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240008, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.472735] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.591167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.757969] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.860524] env[68674]: DEBUG nova.scheduler.client.report [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 790.860813] env[68674]: DEBUG nova.compute.provider_tree [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 81 to 82 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 790.861934] env[68674]: DEBUG nova.compute.provider_tree [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.885406] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 790.885882] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f2406a6d-cd1d-4c8b-befc-98d016fa991b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.898870] env[68674]: 
DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2dc67-dcb1-1721-99ff-92f89bed2f3a, 'name': SearchDatastore_Task, 'duration_secs': 0.026888} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.901174] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 790.901174] env[68674]: value = "task-3240009" [ 790.901174] env[68674]: _type = "Task" [ 790.901174] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.901473] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8adf71a5-083c-4762-9124-e537aafa4244 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.911013] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 790.911013] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a671f-8dcc-acf9-67e8-5c4d35e1ad44" [ 790.911013] env[68674]: _type = "Task" [ 790.911013] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.914454] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240009, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.926684] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a671f-8dcc-acf9-67e8-5c4d35e1ad44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.930039] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240008, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.972901] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.265158] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240005, 'name': ReconfigVM_Task, 'duration_secs': 1.024519} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.266704] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/5c12cb5d-821c-4e63-86a0-dadc9794a8ba.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.266704] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9cfb48f6-08c4-484c-90dd-5630b11a3789 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.277026] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 791.277026] env[68674]: value = "task-3240010" [ 791.277026] env[68674]: _type = "Task" [ 791.277026] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.286758] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240010, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.370129] env[68674]: DEBUG oslo_concurrency.lockutils [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.252s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.370129] env[68674]: INFO nova.compute.manager [None req-58ba5c8a-80e7-4e5d-8b95-0ff6e8e1364c tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Successfully reverted task state from rebuilding on failure for instance. [ 791.373702] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.232s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.375520] env[68674]: INFO nova.compute.claims [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.415324] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240009, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.430212] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a671f-8dcc-acf9-67e8-5c4d35e1ad44, 'name': SearchDatastore_Task, 'duration_secs': 0.05192} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.430961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.431396] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f69c5fcf-6d25-48a5-a154-c3632c76175a/f69c5fcf-6d25-48a5-a154-c3632c76175a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 791.431834] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240008, 'name': CreateVM_Task, 'duration_secs': 0.81421} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.432148] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff49fbfe-7f3e-4c00-97ed-10baa42a152f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.434930] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.435460] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.435724] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.436148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.436928] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98462fe8-9e45-45a3-95a3-1b8dab421823 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.441571] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 791.441571] env[68674]: value = "task-3240011" [ 791.441571] env[68674]: _type = "Task" [ 791.441571] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.442415] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 791.442415] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ac2f1c-df8a-6f4e-af46-c8c1edac80b7" [ 791.442415] env[68674]: _type = "Task" [ 791.442415] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.454578] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.458077] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ac2f1c-df8a-6f4e-af46-c8c1edac80b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.471468] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.791100] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240010, 'name': Rename_Task, 'duration_secs': 0.306352} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.791100] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.791100] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a781e88-df87-47ba-9aab-b19c6e22713a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.802708] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 791.802708] env[68674]: value = "task-3240012" [ 791.802708] env[68674]: _type = "Task" [ 791.802708] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.817395] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.919932] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240009, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.959735] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513765} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.964260] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f69c5fcf-6d25-48a5-a154-c3632c76175a/f69c5fcf-6d25-48a5-a154-c3632c76175a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.964598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.965060] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ac2f1c-df8a-6f4e-af46-c8c1edac80b7, 'name': SearchDatastore_Task, 'duration_secs': 0.030654} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.965343] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25ecf2ba-7f3e-4bc6-af0f-957cc1aa002a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.971804] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.972225] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.972536] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.972726] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.972976] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 
tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.973316] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed8a7b8d-e5bf-4562-8d7c-bef7a0b413b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.981967] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.984205] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.984387] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.985223] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 791.985223] env[68674]: value = "task-3240013" [ 791.985223] env[68674]: _type = "Task" [ 791.985223] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.985429] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad240be7-8389-4871-a365-98fc41a97625 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.996515] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 791.996515] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526b06ce-903f-ab8a-ce71-a106daf374af" [ 791.996515] env[68674]: _type = "Task" [ 791.996515] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.999747] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240013, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.008594] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526b06ce-903f-ab8a-ce71-a106daf374af, 'name': SearchDatastore_Task, 'duration_secs': 0.008405} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.009648] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a1529f6-553f-492b-a34c-f5fc2d7d1b0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.014914] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 792.014914] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cec22b-ae10-d9ab-eade-d65b934912ba" [ 792.014914] env[68674]: _type = "Task" [ 792.014914] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.024438] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cec22b-ae10-d9ab-eade-d65b934912ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.313291] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240012, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.426016] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240009, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.481236] env[68674]: DEBUG oslo_vmware.api [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3239996, 'name': ReconfigVM_Task, 'duration_secs': 5.819161} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.481594] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.481594] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Reconfigured VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 792.500977] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092532} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.501357] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.502444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2901355-e1d0-4291-a587-0586d2cf2454 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.531456] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] f69c5fcf-6d25-48a5-a154-c3632c76175a/f69c5fcf-6d25-48a5-a154-c3632c76175a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.539024] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8b78f16-1e80-442c-9ef3-9cef99a1c889 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.561073] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cec22b-ae10-d9ab-eade-d65b934912ba, 'name': SearchDatastore_Task, 'duration_secs': 0.01016} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.562541] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.563137] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.563137] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 792.563137] env[68674]: value = "task-3240014" [ 792.563137] env[68674]: _type = "Task" [ 792.563137] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.563379] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee44372b-c0fc-4358-b16c-593803831384 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.577524] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.579414] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 792.579414] env[68674]: value = "task-3240015" [ 792.579414] env[68674]: _type = "Task" [ 792.579414] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.588846] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240015, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.818347] env[68674]: DEBUG oslo_vmware.api [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240012, 'name': PowerOnVM_Task, 'duration_secs': 0.535127} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.818347] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.818347] env[68674]: INFO nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Took 10.16 seconds to spawn the instance on the hypervisor. 
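The records above all follow the same shape: a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) is created, then the caller "waits for the task", logging "progress is N%" until "completed successfully". Below is a minimal sketch of that poll-until-done pattern, not the oslo.vmware implementation itself; fetch_task_state is a hypothetical callable standing in for the real property read that returns (state, progress, error) tuples.

    # Minimal sketch of the poll-until-done pattern traced above (assumption:
    # fetch_task_state is a hypothetical stand-in, not a real oslo.vmware call).
    import time

    def wait_for_task(task_id, fetch_task_state, poll_interval=0.5):
        while True:
            state, progress, error = fetch_task_state(task_id)
            if state == "running":
                # Mirrors the "progress is N%" records in the log.
                print(f"Task {task_id} progress is {progress}%")
                time.sleep(poll_interval)
            elif state == "success":
                # Mirrors the "completed successfully" records.
                print(f"Task {task_id} completed successfully")
                return
            else:  # error / cancelled
                raise RuntimeError(f"Task {task_id} failed: {error}")

    # Canned sequence reproducing the 0% -> 88% -> success flow seen above.
    states = iter([("running", 0, None), ("running", 88, None), ("success", None, None)])
    wait_for_task("task-demo", lambda _tid: next(states), poll_interval=0)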
[ 792.818347] env[68674]: DEBUG nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 792.818347] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290aa20e-7537-42dd-931f-da660e872d51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.923033] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240009, 'name': CloneVM_Task, 'duration_secs': 1.764487} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.923033] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Created linked-clone VM from snapshot [ 792.923592] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1482ca-25a1-4952-b0df-23ebf8e005cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.935422] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Uploading image 819147a7-c618-43d3-9313-86c442b2d2a3 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 792.952532] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce6802e-7289-40a5-a288-6c488d2796fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.964494] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 792.964494] env[68674]: value = "vm-647551" [ 792.964494] env[68674]: _type = "VirtualMachine" [ 792.964494] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 792.965691] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507beb60-02b9-41c8-9cf5-86aab1be9210 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.969548] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-47dc47ce-899f-4d4e-a4fb-ecb0f0218aa9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.002472] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c7d78d-7c7e-45ef-9245-cf902915162b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.006106] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lease: (returnval){ [ 793.006106] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a45cbc-a701-872d-2bbe-49a3834821fb" [ 793.006106] env[68674]: _type = "HttpNfcLease" [ 793.006106] env[68674]: } obtained for exporting VM: (result){ [ 793.006106] env[68674]: value = "vm-647551" [ 793.006106] env[68674]: _type = "VirtualMachine" [ 793.006106] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 793.006444] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the lease: (returnval){ [ 793.006444] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a45cbc-a701-872d-2bbe-49a3834821fb" [ 793.006444] env[68674]: _type = "HttpNfcLease" [ 793.006444] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 793.014106] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fdc363-57ec-426e-a71c-d9a89b505ad0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.020088] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 793.020088] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a45cbc-a701-872d-2bbe-49a3834821fb" [ 793.020088] env[68674]: _type = "HttpNfcLease" [ 793.020088] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 793.020691] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 793.020691] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a45cbc-a701-872d-2bbe-49a3834821fb" [ 793.020691] env[68674]: _type = "HttpNfcLease" [ 793.020691] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 793.021470] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681f330d-ea2f-4918-aee7-1a8f310dd21f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.033964] env[68674]: DEBUG nova.compute.provider_tree [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.041241] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52629a0a-59e3-2ce5-1f36-1100082585c3/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 793.041430] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52629a0a-59e3-2ce5-1f36-1100082585c3/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 793.111454] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240014, 'name': ReconfigVM_Task, 'duration_secs': 0.479942} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.114762] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Reconfigured VM instance instance-00000037 to attach disk [datastore2] f69c5fcf-6d25-48a5-a154-c3632c76175a/f69c5fcf-6d25-48a5-a154-c3632c76175a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.115762] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240015, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458169} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.115979] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7a7a3fe-395d-4104-b563-9f1419066dfb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.117705] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.117923] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.118181] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f22865d3-230a-4bbc-b7ff-30d64d8dc61c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.124439] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 793.124439] env[68674]: value = "task-3240017" [ 793.124439] env[68674]: _type = "Task" [ 793.124439] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.125777] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 793.125777] env[68674]: value = "task-3240018" [ 793.125777] env[68674]: _type = "Task" [ 793.125777] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.139057] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240017, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.143346] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240018, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.153486] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-172c90ad-a14b-49ce-80d0-6fb450c139b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.346851] env[68674]: DEBUG nova.compute.manager [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-vif-deleted-09fb1c43-275f-4342-a947-5a8d1f977597 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 793.346851] env[68674]: INFO nova.compute.manager [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Neutron deleted interface 09fb1c43-275f-4342-a947-5a8d1f977597; detaching it from the instance and deleting it from the info cache [ 793.346986] env[68674]: DEBUG nova.network.neutron [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.348568] env[68674]: INFO nova.compute.manager [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Took 49.34 seconds to build instance. 
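The network-vif-deleted handler above refreshes the instance_info_cache with the network_info structure shown in the log: each VIF entry nests its fixed IPs, and any floating IPs, under network -> subnets -> ips. The sketch below walks that structure with plain dict/list traversal; the dict is an abridged copy of the logged entry, keeping only the fields the traversal needs.

    # Reading addresses out of a network_info entry like the one logged above
    # (abridged copy of the cached VIF; field names taken from the log).
    vif = {
        "id": "55160236-eb1d-47d3-bca8-d3b46267c37f",
        "address": "fa:16:3e:3e:26:3e",
        "network": {
            "label": "tempest-AttachInterfacesTestJSON-772405508-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.4",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.148", "type": "floating"}],
                }],
            }],
        },
    }

    def addresses(vif):
        """Yield (kind, address) pairs for every fixed and floating IP on a VIF."""
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                yield ip["type"], ip["address"]
                for fip in ip.get("floating_ips", []):
                    yield fip["type"], fip["address"]

    print(dict(addresses(vif)))  # {'fixed': '192.168.128.4', 'floating': '10.180.180.148'}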
[ 793.589051] env[68674]: DEBUG nova.scheduler.client.report [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 793.589426] env[68674]: DEBUG nova.compute.provider_tree [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 82 to 83 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 793.589710] env[68674]: DEBUG nova.compute.provider_tree [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.642142] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240017, 'name': Rename_Task, 'duration_secs': 0.22023} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.647359] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.648071] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240018, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075514} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.648354] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b111a571-6d4b-4b1e-88ed-6ee3e0ef2074 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.650435] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.651644] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bc1930-5d33-4761-92dc-053b2c60a011 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.676320] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.678976] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bc719fa-14bf-47cc-8d5b-56406233d2ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.701427] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 793.701427] env[68674]: value = "task-3240019" [ 793.701427] env[68674]: _type = "Task" [ 793.701427] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.708884] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 793.708884] env[68674]: value = "task-3240020" [ 793.708884] env[68674]: _type = "Task" [ 793.708884] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.716235] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.724022] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240020, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.852142] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b09add3c-f574-4179-8c01-ed6d06d26bf1 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.402s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.852771] env[68674]: DEBUG oslo_concurrency.lockutils [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.853013] env[68674]: DEBUG oslo_concurrency.lockutils [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] Acquired lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.854598] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471ef47b-ad0f-4737-a997-c70ecdbd80eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.876880] env[68674]: DEBUG oslo_concurrency.lockutils [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] Releasing lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.878346] env[68674]: WARNING nova.compute.manager [req-969de992-afeb-480b-9eb1-8bbf87b91974 req-5bceba85-e26a-43c1-9152-0a3ba8a58b30 service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Detach interface failed, port_id=09fb1c43-275f-4342-a947-5a8d1f977597, reason: No device with interface-id 09fb1c43-275f-4342-a947-5a8d1f977597 exists on VM: nova.exception.NotFound: No device with interface-id 09fb1c43-275f-4342-a947-5a8d1f977597 exists on VM [ 793.967525] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.968105] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.969349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.969739] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.970074] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.972653] env[68674]: INFO nova.compute.manager [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Terminating instance [ 794.095817] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.096384] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 794.101966] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.714s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.102354] env[68674]: DEBUG nova.objects.instance [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lazy-loading 'resources' on Instance uuid 1189fa93-608b-4684-a675-f1caf29a9f43 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.110212] env[68674]: INFO nova.compute.manager [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Rescuing [ 794.110212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.110212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.110212] env[68674]: DEBUG nova.network.neutron [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.110892] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.111765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.112035] env[68674]: DEBUG nova.network.neutron [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.221198] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d 
tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240019, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.227988] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240020, 'name': ReconfigVM_Task, 'duration_secs': 0.303398} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.227988] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.227988] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91acccf3-d0c1-4dca-9b7a-c166164424e6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.235023] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 794.235023] env[68674]: value = "task-3240021" [ 794.235023] env[68674]: _type = "Task" [ 794.235023] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.249513] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240021, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.478430] env[68674]: DEBUG nova.compute.manager [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 794.482028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.482028] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033dc862-8987-4685-a298-5f2a24a20120 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.487551] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.487915] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c1ba4b2-6e62-4d55-b4b0-09677671e8e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.495261] env[68674]: DEBUG oslo_vmware.api [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 794.495261] env[68674]: value = "task-3240022" [ 794.495261] env[68674]: _type = "Task" [ 794.495261] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.508389] env[68674]: DEBUG oslo_vmware.api [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.610372] env[68674]: DEBUG nova.compute.utils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 794.611959] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 794.616316] env[68674]: DEBUG nova.network.neutron [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 794.679371] env[68674]: DEBUG nova.policy [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3708546dea34437f9bcbd5504c346d33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e05a97545e94e8a9be8f382457d79b2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 794.721976] env[68674]: DEBUG oslo_vmware.api [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240019, 'name': PowerOnVM_Task, 'duration_secs': 0.779015} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.727061] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.727355] env[68674]: INFO nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Took 9.34 seconds to spawn the instance on the hypervisor. [ 794.728110] env[68674]: DEBUG nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.738077] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15f6cd3-e764-46ed-9125-0e49c28e1650 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.775396] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240021, 'name': Rename_Task, 'duration_secs': 0.176358} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.775610] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 794.775922] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b905de1a-0068-49fe-97fc-08f3a1ab071d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.783179] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 794.783179] env[68674]: value = "task-3240023" [ 794.783179] env[68674]: _type = "Task" [ 794.783179] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.797440] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240023, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.022198] env[68674]: DEBUG oslo_vmware.api [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240022, 'name': PowerOffVM_Task, 'duration_secs': 0.304514} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.025470] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.025726] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 795.026322] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41eba8e5-78f7-48c4-bdc7-270c32f4b105 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.104425] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.105169] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.105534] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleting the datastore file [datastore2] 045e54ff-9e2c-4b04-afac-34cb6580cb2c {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.105960] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50702920-37ea-406c-b129-a3701f1e2783 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.115041] env[68674]: DEBUG oslo_vmware.api [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 795.115041] env[68674]: value = "task-3240025" [ 795.115041] env[68674]: _type = "Task" [ 795.115041] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.120262] env[68674]: DEBUG nova.network.neutron [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Successfully created port: 4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.124390] env[68674]: DEBUG nova.compute.utils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 795.132724] env[68674]: DEBUG oslo_vmware.api [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.230262] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d88297-5739-43ad-a654-d18b2167a00a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.243762] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f16c74-be52-44fe-9b5e-d9e97f75398a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.304345] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa9f614-e218-4405-a3a0-7cd3fd4d9b18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.307844] env[68674]: INFO nova.compute.manager [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Took 49.67 seconds to build instance. [ 795.319058] env[68674]: DEBUG oslo_vmware.api [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240023, 'name': PowerOnVM_Task, 'duration_secs': 0.51748} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.321552] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 795.321552] env[68674]: INFO nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Took 5.15 seconds to spawn the instance on the hypervisor. 
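Note: the PowerOffVM_Task / PowerOnVM_Task records above follow oslo_vmware's task-wait pattern: a vSphere method is invoked through the session, the returned task reference is handed to wait_for_task(), and the "progress is N%" DEBUG lines are emitted while it polls. A minimal sketch of that pattern, assuming only the public oslo_vmware API; the host, credentials, retry/poll values and vm_ref below are illustrative placeholders, not values taken from this log:

    from oslo_vmware import api


    def power_off(session, vm_ref):
        # PowerOffVM_Task returns a task reference; wait_for_task() polls it
        # until completion, which is what produces the
        # "Task: {'id': ..., 'name': PowerOffVM_Task} progress is N%" records.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)


    if __name__ == '__main__':
        # Placeholder endpoint and credentials; 10 API retries, 0.5s task poll
        # interval (positional arguments of VMwareAPISession).
        session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                       10, 0.5)
        # power_off(session, vm_ref)  # vm_ref would come from a prior lookup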
[ 795.321552] env[68674]: DEBUG nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 795.321852] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535bb91b-fe56-401c-85d2-e202769f8033 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.327162] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2285bf-5e21-4176-a95e-a170df4512b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.347304] env[68674]: DEBUG nova.compute.provider_tree [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.624620] env[68674]: DEBUG oslo_vmware.api [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30322} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.624937] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.625078] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.625260] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.625438] env[68674]: INFO nova.compute.manager [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 795.625679] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.625867] env[68674]: DEBUG nova.compute.manager [-] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 795.625961] env[68674]: DEBUG nova.network.neutron [-] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.627908] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 795.688768] env[68674]: DEBUG nova.network.neutron [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Updating instance_info_cache with network_info: [{"id": "c9e9cacf-59be-4854-a352-111921b372e2", "address": "fa:16:3e:d1:a3:98", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e9cacf-59", "ovs_interfaceid": "c9e9cacf-59be-4854-a352-111921b372e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.700706] env[68674]: DEBUG nova.network.neutron [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [{"id": "55160236-eb1d-47d3-bca8-d3b46267c37f", "address": "fa:16:3e:3e:26:3e", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55160236-eb", "ovs_interfaceid": "55160236-eb1d-47d3-bca8-d3b46267c37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.810386] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7a1b96fc-be17-4102-b264-820b8b726d2d tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.493s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.855174] env[68674]: DEBUG nova.scheduler.client.report [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.860703] env[68674]: INFO nova.compute.manager [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Took 49.54 seconds to build instance. 
[ 796.191829] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-5c12cb5d-821c-4e63-86a0-dadc9794a8ba" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.208386] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-045e54ff-9e2c-4b04-afac-34cb6580cb2c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.364023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.260s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.364234] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.481s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.367055] env[68674]: INFO nova.compute.claims [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.369904] env[68674]: DEBUG oslo_concurrency.lockutils [None req-512743bf-2c69-48ee-8b10-899c0cc95dc6 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.919s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.640024] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 796.716361] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7d5369f9-08fb-42da-a4fd-0c178e816bba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-045e54ff-9e2c-4b04-afac-34cb6580cb2c-09fb1c43-275f-4342-a947-5a8d1f977597" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.859s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.743921] env[68674]: DEBUG nova.network.neutron [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Successfully updated port: 4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.895050] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51680493-c30d-4fae-80de-39a4a6d76f22 tempest-ServerActionsV293TestJSON-1881910272 tempest-ServerActionsV293TestJSON-1881910272-project-member] Lock "1189fa93-608b-4684-a675-f1caf29a9f43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.037s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.901308] env[68674]: DEBUG nova.network.neutron [-] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.251741] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.251741] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.251741] env[68674]: DEBUG nova.network.neutron [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.406619] env[68674]: INFO nova.compute.manager [-] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Took 1.78 seconds to deallocate network for instance. [ 797.805441] env[68674]: DEBUG nova.network.neutron [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.825228] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea212518-7bcb-452c-9e2e-51a2bb58ab98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.835880] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687662be-d61a-4bb7-81af-24f626ebd685 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.872075] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26de734-d2c6-403b-9286-a60708110fc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.880040] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fafaf9-3d64-47df-8356-22b5220df179 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.895558] env[68674]: DEBUG nova.compute.provider_tree [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.915628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.985845] env[68674]: DEBUG nova.network.neutron [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updating instance_info_cache with network_info: [{"id": "4b49219f-9f62-4839-b5a2-eb1116da215c", "address": "fa:16:3e:0d:4d:8a", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b49219f-9f", "ovs_interfaceid": "4b49219f-9f62-4839-b5a2-eb1116da215c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 798.398700] env[68674]: DEBUG nova.scheduler.client.report [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.488980] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Releasing lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.489335] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Instance network_info: |[{"id": "4b49219f-9f62-4839-b5a2-eb1116da215c", "address": "fa:16:3e:0d:4d:8a", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b49219f-9f", "ovs_interfaceid": "4b49219f-9f62-4839-b5a2-eb1116da215c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 798.879620] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:06:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='4266884',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-144109461',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 798.879968] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.880189] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 798.880272] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.880391] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 798.880535] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 798.880743] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 798.880933] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 798.881153] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 798.881310] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] 
Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 798.881482] env[68674]: DEBUG nova.virt.hardware [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 798.883975] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a264b6-f7a0-480c-84fa-ae37e9f0483f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.893548] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb9e2a-ab3d-1ae4-8b68-ab064cb67908/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 798.894503] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51087eb-e125-47f9-b409-a0b7e051ea9e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.898814] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d68c78a-3ff3-439e-9b67-889ed6963fd4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.903614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.904115] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.915769] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.528s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.917455] env[68674]: INFO nova.compute.claims [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.920689] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:4d:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f1b507ed-cd2d-4c09-9d96-c47bde6a7774', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b49219f-9f62-4839-b5a2-eb1116da215c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.933162] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 798.933646] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb9e2a-ab3d-1ae4-8b68-ab064cb67908/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 798.933866] env[68674]: ERROR oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb9e2a-ab3d-1ae4-8b68-ab064cb67908/disk-0.vmdk due to incomplete transfer. 
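Note: the 'Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return' record above points at oslo_service's loopingcall module. A minimal sketch of the generic polling idiom that module provides (not necessarily the exact wrapper Nova uses here); the helper name, poll interval and polled state are illustrative assumptions:

    from oslo_service import loopingcall


    def wait_until_done(check_done, interval=0.5):
        # Run check_done() every `interval` seconds until it returns a value,
        # then hand that value back to the caller.
        def _poll():
            result = check_done()
            if result is not None:
                # Raising LoopingCallDone stops the loop; its argument becomes
                # the return value of .wait() below.
                raise loopingcall.LoopingCallDone(result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()


    # Usage (illustrative): poll a dict that another greenthread fills in.
    # state = {}
    # value = wait_until_done(lambda: state.get('result'))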
[ 798.934664] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 798.936086] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5fc87f68-e877-4dd9-bb38-21f7e91e97a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.937338] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3892f5eb-d3be-4f59-a7d5-11c69d45a058 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.962237] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bb9e2a-ab3d-1ae4-8b68-ab064cb67908/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 798.962485] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Uploaded image 662235da-0849-4b73-b3fa-11a761dbe32b to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 798.965946] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 798.967608] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dd210bb7-866e-481d-9ae3-ad6f25ddc114 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.969167] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.969167] env[68674]: value = "task-3240026" [ 798.969167] env[68674]: _type = "Task" [ 798.969167] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.974904] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 798.974904] env[68674]: value = "task-3240027" [ 798.974904] env[68674]: _type = "Task" [ 798.974904] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.978243] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240026, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.986723] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240027, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.439594] env[68674]: DEBUG nova.compute.utils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 799.441251] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 799.441436] env[68674]: DEBUG nova.network.neutron [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.484162] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240026, 'name': CreateVM_Task, 'duration_secs': 0.507297} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.488257] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.489131] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.489715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.489715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 799.490648] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-108f78a4-e343-4487-91e0-92b1bbd34377 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.495619] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240027, 'name': Destroy_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.499592] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 799.499592] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a4c49-800c-0a69-c2c8-b64f228f91fe" [ 799.499592] env[68674]: _type = "Task" [ 799.499592] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.513236] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a4c49-800c-0a69-c2c8-b64f228f91fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.515033] env[68674]: DEBUG nova.policy [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1df2b462632d47b0ba9c31ae7aececf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '435fbf1f847d4d36ba126fc8c49b59fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 799.675611] env[68674]: DEBUG nova.compute.manager [req-0e7df1b9-5c10-47ae-8c36-d0f53ad76b21 req-1259bd4c-1495-4af3-b7e9-cf5a84dbb23a service nova] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Received event network-vif-deleted-55160236-eb1d-47d3-bca8-d3b46267c37f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.738082] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "2ed83aff-9a73-464b-914a-479d91cdfce0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.738373] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.751669] env[68674]: DEBUG nova.compute.manager [req-0fc55a4e-5c04-4080-8b42-e175901a283f req-0842120f-25cf-418a-a805-9ea5b5ccc564 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Received event network-vif-plugged-4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.752055] 
env[68674]: DEBUG oslo_concurrency.lockutils [req-0fc55a4e-5c04-4080-8b42-e175901a283f req-0842120f-25cf-418a-a805-9ea5b5ccc564 service nova] Acquiring lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.752217] env[68674]: DEBUG oslo_concurrency.lockutils [req-0fc55a4e-5c04-4080-8b42-e175901a283f req-0842120f-25cf-418a-a805-9ea5b5ccc564 service nova] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.752898] env[68674]: DEBUG oslo_concurrency.lockutils [req-0fc55a4e-5c04-4080-8b42-e175901a283f req-0842120f-25cf-418a-a805-9ea5b5ccc564 service nova] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.753155] env[68674]: DEBUG nova.compute.manager [req-0fc55a4e-5c04-4080-8b42-e175901a283f req-0842120f-25cf-418a-a805-9ea5b5ccc564 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] No waiting events found dispatching network-vif-plugged-4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.753342] env[68674]: WARNING nova.compute.manager [req-0fc55a4e-5c04-4080-8b42-e175901a283f req-0842120f-25cf-418a-a805-9ea5b5ccc564 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Received unexpected event network-vif-plugged-4b49219f-9f62-4839-b5a2-eb1116da215c for instance with vm_state building and task_state spawning. [ 799.909498] env[68674]: DEBUG nova.network.neutron [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Successfully created port: 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.951222] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 800.004709] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240027, 'name': Destroy_Task, 'duration_secs': 0.602227} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.011999] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroyed the VM [ 800.012362] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 800.013679] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4e9146e4-ef16-468d-8bee-0f8a7d602f1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.023075] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525a4c49-800c-0a69-c2c8-b64f228f91fe, 'name': SearchDatastore_Task, 'duration_secs': 0.014858} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.024613] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.024972] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.025333] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.025508] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.025696] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.026045] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 800.026045] env[68674]: value = "task-3240028" [ 800.026045] env[68674]: _type = "Task" [ 800.026045] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.029740] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aed65c2c-32d9-4dbc-b2bf-fbf4dc5b8d65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.042831] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240028, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.047139] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.047139] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.047139] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ae3ee53-bf8e-40d9-a96b-4d886402ba51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.052354] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 800.052354] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52204a39-f3d3-2d5b-8eb3-10dde0fddda9" [ 800.052354] env[68674]: _type = "Task" [ 800.052354] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.065680] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52204a39-f3d3-2d5b-8eb3-10dde0fddda9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.241464] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.353156] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.353265] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bac1555a-7740-4b87-ae25-64a38f8a2c6b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.360942] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 800.360942] env[68674]: value = "task-3240029" [ 800.360942] env[68674]: _type = "Task" [ 800.360942] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.377239] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240029, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.521018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38a466a-0c14-4808-bd26-9d9dbf6832fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.528278] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515b4978-d1c9-4d9f-9a55-9f41d99f8326 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.541866] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240028, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.571428] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbf452b-e922-4a8f-ae6a-e8b28b552c7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.580630] env[68674]: DEBUG nova.compute.manager [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.580946] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52204a39-f3d3-2d5b-8eb3-10dde0fddda9, 'name': SearchDatastore_Task, 'duration_secs': 0.01385} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.583536] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2cde5e-730d-4705-a64e-94d0204636d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.586828] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abc5196b-d664-47c1-9caf-5837c0d34fad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.590066] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc011b03-6461-4cb0-a301-55f7ae79c31c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.610022] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 800.610022] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528552b0-c436-2b0c-b20b-d1e2adcb6b28" [ 800.610022] env[68674]: _type = "Task" [ 800.610022] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.610022] env[68674]: DEBUG nova.compute.provider_tree [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.618707] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528552b0-c436-2b0c-b20b-d1e2adcb6b28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.770903] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.871337] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240029, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.969571] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.991401] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.992162] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.992162] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.992162] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.992375] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.992375] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.992536] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.992712] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.992883] env[68674]: DEBUG nova.virt.hardware [None 
req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.993057] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.993233] env[68674]: DEBUG nova.virt.hardware [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.994197] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32b9929-3332-4274-8a0e-39eadf1dd937 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.003376] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edf8cfc-ff03-4379-a213-5c25a57cf358 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.043388] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240028, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.111866] env[68674]: INFO nova.compute.manager [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] instance snapshotting [ 801.115042] env[68674]: DEBUG nova.scheduler.client.report [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 801.124226] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaabc3a-a811-4735-a13d-d4017c842eff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.133846] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528552b0-c436-2b0c-b20b-d1e2adcb6b28, 'name': SearchDatastore_Task, 'duration_secs': 0.022732} completed successfully. 
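The nova.virt.hardware lines above walk through CPU-topology selection for the m1.nano flavor: with 1 vCPU and 65536-wide socket/core/thread limits, the only candidate is sockets=1, cores=1, threads=1. As a rough illustration of that enumeration (a minimal sketch, not Nova's actual implementation; `Topology`, `possible_topologies` and `sorted_topologies` are hypothetical names), the candidates are just the exact factorisations of the vCPU count that fit the maxima:

```python
from typing import NamedTuple


class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every exact (sockets, cores, threads) factorisation of vcpus
    that stays within the given maxima."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                yield Topology(sockets, cores, threads)


def sorted_topologies(vcpus, preferred=None, **maxima):
    """Put candidates that match an explicitly preferred topology first."""
    def mismatches(topo):
        if preferred is None:
            return 0
        return sum((topo.sockets != preferred.sockets,
                    topo.cores != preferred.cores,
                    topo.threads != preferred.threads))
    return sorted(possible_topologies(vcpus, **maxima), key=mismatches)


if __name__ == "__main__":
    # 1 vCPU with the default 65536-wide limits, as in the log above:
    # the only possible topology is Topology(sockets=1, cores=1, threads=1).
    print(sorted_topologies(1))
```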
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.147442] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.147747] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035/c4fd04a7-2b11-4c4b-84d1-53edc1e3f035.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.148586] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1edff817-06b4-4926-9758-612486e9783b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.151073] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5ac590-fac8-4591-8feb-c6e705285a44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.166090] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 801.166090] env[68674]: value = "task-3240030" [ 801.166090] env[68674]: _type = "Task" [ 801.166090] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.172681] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240030, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.276866] env[68674]: INFO nova.compute.manager [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Rebuilding instance [ 801.320789] env[68674]: DEBUG nova.compute.manager [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.320789] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac69a368-fb4c-40f6-949a-073b58b2b049 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.372705] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240029, 'name': PowerOffVM_Task, 'duration_secs': 0.708414} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.373325] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.374168] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a0a213-b1d6-4be0-9434-374678055761 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.394249] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc53baae-c728-4922-9db4-066d5aef96f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.424611] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.424899] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32fa8879-7a90-434f-8357-0f934e84a63a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.434090] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 801.434090] env[68674]: value = "task-3240031" [ 801.434090] env[68674]: _type = "Task" [ 801.434090] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.448680] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 801.448878] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.449487] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.449705] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.453033] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.453033] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7edfac8d-9d01-4127-99ed-75a43b21fa04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.460098] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.460284] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Folder [datastore2] devstack-image-cache_base created. 
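The lockutils/ds_util sequence above (acquire a per-image lock on the cached .vmdk path, issue a MakeDirectory for devstack-image-cache_base, then run a SearchDatastore_Task to see whether the cached disk already exists) is the usual fetch-image-if-missing pattern. Below is a minimal local-filesystem sketch of that flow, assuming hypothetical helpers `image_lock` and `fetch_image_if_missing` and a caller-supplied `download` callable; it is an analogue of the logged behaviour, not the vmwareapi driver's real code:

```python
import os
import threading
from contextlib import contextmanager

_image_locks = {}
_registry_lock = threading.Lock()


@contextmanager
def image_lock(image_id):
    # One lock per cached image, mirroring the per-.vmdk lock names in the log.
    with _registry_lock:
        lock = _image_locks.setdefault(image_id, threading.Lock())
    with lock:
        yield


def fetch_image_if_missing(cache_root, image_id, download):
    """Ensure the cache directory exists, then download only if the disk is absent."""
    target_dir = os.path.join(cache_root, image_id)
    target = os.path.join(target_dir, image_id + ".vmdk")
    with image_lock(image_id):
        os.makedirs(target_dir, exist_ok=True)   # the MakeDirectory step
        if os.path.exists(target):               # the SearchDatastore_Task step
            return target                        # cache hit: reuse the cached disk
        download(target)                         # cache miss: populate the cache
        return target
```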
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.461026] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc8651bb-4c2e-4e21-addf-464ffc17e183 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.466905] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 801.466905] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52997080-6992-2f34-f65e-b62358e95de8" [ 801.466905] env[68674]: _type = "Task" [ 801.466905] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.477570] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52997080-6992-2f34-f65e-b62358e95de8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.544541] env[68674]: DEBUG oslo_vmware.api [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240028, 'name': RemoveSnapshot_Task, 'duration_secs': 1.032211} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.544964] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 801.545288] env[68674]: INFO nova.compute.manager [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 16.78 seconds to snapshot the instance on the hypervisor. [ 801.628160] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.628746] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 801.632876] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.390s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.633487] env[68674]: DEBUG nova.objects.instance [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lazy-loading 'resources' on Instance uuid 1699f556-d451-40e3-a213-7edb753b03f1 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 801.646114] env[68674]: DEBUG nova.network.neutron [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Successfully updated port: 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.666264] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 801.666702] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f07cd716-c6d4-47fc-9e42-dad48a9f8d2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.680823] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240030, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.682718] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 801.682718] env[68674]: value = "task-3240032" [ 801.682718] env[68674]: _type = "Task" [ 801.682718] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.693928] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240032, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.755182] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.755513] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.783224] env[68674]: DEBUG nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Received event network-changed-4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 801.783413] env[68674]: DEBUG nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Refreshing instance network info cache due to event network-changed-4b49219f-9f62-4839-b5a2-eb1116da215c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 801.783649] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Acquiring lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.783821] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Acquired lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.783994] env[68674]: DEBUG nova.network.neutron [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Refreshing network info cache for port 4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 801.979035] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52997080-6992-2f34-f65e-b62358e95de8, 'name': SearchDatastore_Task, 'duration_secs': 0.010677} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.983062] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff18f49-1423-4144-82f8-0996876046ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.985971] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 801.985971] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52489e14-518a-6af5-eec4-864dd44469f9" [ 801.985971] env[68674]: _type = "Task" [ 801.985971] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.994803] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52489e14-518a-6af5-eec4-864dd44469f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.092154] env[68674]: DEBUG nova.compute.manager [None req-f875b2cd-e244-4dfc-87de-4411043a9cfd tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Found 2 images (rotation: 2) {{(pid=68674) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 802.137672] env[68674]: DEBUG nova.compute.utils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 802.139729] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 802.139906] env[68674]: DEBUG nova.network.neutron [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 802.148539] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.148724] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.148959] env[68674]: DEBUG nova.network.neutron [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.182484] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240030, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.184087] env[68674]: DEBUG nova.policy [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50e46e8c9fbc4778b5f89359ae81bfa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6b179855b874365964446f95f9f5a53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 802.198336] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240032, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.260119] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.336918] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.337498] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed8ef9ff-127f-4dae-a440-5e40d7c9cc0f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.345428] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 802.345428] env[68674]: value = "task-3240033" [ 802.345428] env[68674]: _type = "Task" [ 802.345428] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.356689] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240033, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.498174] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52489e14-518a-6af5-eec4-864dd44469f9, 'name': SearchDatastore_Task, 'duration_secs': 0.019462} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.501467] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.503035] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
{{(pid=68674) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 802.503035] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f7e5b14-599f-4336-897d-83152ef4677c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.509771] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 802.509771] env[68674]: value = "task-3240034" [ 802.509771] env[68674]: _type = "Task" [ 802.509771] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.521969] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.551133] env[68674]: DEBUG nova.network.neutron [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Successfully created port: 9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.582291] env[68674]: DEBUG nova.network.neutron [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updated VIF entry in instance network info cache for port 4b49219f-9f62-4839-b5a2-eb1116da215c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 802.582696] env[68674]: DEBUG nova.network.neutron [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updating instance_info_cache with network_info: [{"id": "4b49219f-9f62-4839-b5a2-eb1116da215c", "address": "fa:16:3e:0d:4d:8a", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b49219f-9f", "ovs_interfaceid": "4b49219f-9f62-4839-b5a2-eb1116da215c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.647025] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 802.673083] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10962932-26a6-4ed3-9363-d39e8208d554 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.681451] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240030, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.684624] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc46a056-66cc-4f79-9ca3-4c1975c14166 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.725248] env[68674]: DEBUG nova.network.neutron [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.728183] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed65069b-c07d-43d5-b470-3566d0f103e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.730495] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240032, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.736482] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3ee3d9-5370-4374-98e7-6c8a9e495a2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.752169] env[68674]: DEBUG nova.compute.provider_tree [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.787864] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.856156] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240033, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.904652] env[68674]: DEBUG nova.network.neutron [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [{"id": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "address": "fa:16:3e:55:82:ec", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a9b5c9d-f8", "ovs_interfaceid": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.021160] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240034, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.086240] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Releasing lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.087032] env[68674]: DEBUG nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Received event network-vif-plugged-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.087032] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Acquiring lock "7329e503-d87d-4e15-b181-65ac6e376781-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.087032] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Lock "7329e503-d87d-4e15-b181-65ac6e376781-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.087218] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Lock "7329e503-d87d-4e15-b181-65ac6e376781-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.087363] env[68674]: DEBUG nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] No waiting events found dispatching network-vif-plugged-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 803.087457] env[68674]: WARNING nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Received unexpected event network-vif-plugged-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 for instance with vm_state building and task_state spawning. [ 803.087596] env[68674]: DEBUG nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Received event network-changed-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.087753] env[68674]: DEBUG nova.compute.manager [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Refreshing instance network info cache due to event network-changed-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.087935] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Acquiring lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.177667] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240030, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.658433} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.177960] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035/c4fd04a7-2b11-4c4b-84d1-53edc1e3f035.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.178205] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.178463] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b3a91b9-54b6-4f56-9ed7-a730e7c591a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.187060] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 803.187060] env[68674]: value = "task-3240035" [ 803.187060] env[68674]: _type = "Task" [ 803.187060] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.199266] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.203810] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240032, 'name': CreateSnapshot_Task, 'duration_secs': 1.142015} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.204161] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 803.205045] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab4795c-2a1d-43a2-bfc9-edc555c30bd5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.255726] env[68674]: DEBUG nova.scheduler.client.report [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.356853] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240033, 'name': PowerOffVM_Task, 'duration_secs': 0.61501} completed successfully. 
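The repeated "progress is N%." lines followed by an entry carrying 'duration_secs' come from polling a vCenter task at a fixed interval until it finishes. A minimal sketch of that polling loop, assuming a caller-supplied `get_task_info` callable that returns a dict with 'state', 'name' and 'progress' keys (a stand-in for illustration, not oslo.vmware's real task object or API):

```python
import time


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll get_task_info() until the task succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()          # e.g. {'state': 'running', 'progress': 25}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "task failed"))
        # corresponds to the "... progress is N%." debug lines above
        print(f"Task {info.get('name')} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```

In the log excerpt this pattern appears for RemoveSnapshot_Task, PowerOffVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and CreateSnapshot_Task alike; only the polled task differs.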
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.357271] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.357924] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.358712] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62be0352-6b47-48f3-a34c-5f25e5af7a60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.367542] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.367782] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08cb9e9f-49e4-4eb6-b5df-c593c4544841 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.395720] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.395998] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.396230] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Deleting the datastore file [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.396509] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-446cfd87-1684-4deb-861c-9cb9c1db30ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.403650] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 803.403650] env[68674]: value = "task-3240037" [ 803.403650] env[68674]: _type = "Task" [ 803.403650] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.407349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.407645] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Instance network_info: |[{"id": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "address": "fa:16:3e:55:82:ec", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a9b5c9d-f8", "ovs_interfaceid": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 803.408277] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Acquired lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.408277] env[68674]: DEBUG nova.network.neutron [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Refreshing network info cache for port 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.409583] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:82:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3e0aae3-33d1-403b-bfaf-306f77a1422e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.418988] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 
tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.420954] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 803.425034] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cac9f8ec-753e-4c10-b822-48e9889d5c10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.440901] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240037, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.446802] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.446802] env[68674]: value = "task-3240038" [ 803.446802] env[68674]: _type = "Task" [ 803.446802] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.455774] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240038, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.520870] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240034, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.804691} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.522027] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
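
The CopyVirtualDisk_Task / wait_for_task entries above follow oslo.vmware's usual invoke-then-poll pattern: a vCenter task is created via the VirtualDiskManager and then polled until it reports SUCCESS or ERROR. Below is a minimal illustrative sketch of that pattern, not the Nova code that produced these lines; the vCenter address, credentials and datastore paths are placeholders.

```python
# Minimal sketch (placeholder endpoint, credentials and paths): issue a
# VirtualDiskManager.CopyVirtualDisk_Task against vCenter and poll it to
# completion with oslo.vmware, the same invoke/poll pattern seen in the log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'password',          # placeholders, not real
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager

# Source/destination datastore paths below are illustrative only.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

# wait_for_task polls the task object until it succeeds or fails; the repeated
# "_poll_task ... progress is N%" DEBUG lines above correspond to this polling.
session.wait_for_task(task)
```
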
[ 803.522027] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66746c0-cc77-4e9c-932c-8ec09eef94d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.548884] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.549132] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ee74b32-cb67-4b8b-86b9-916351ae415d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.569855] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 803.569855] env[68674]: value = "task-3240039" [ 803.569855] env[68674]: _type = "Task" [ 803.569855] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.579214] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240039, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.645357] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52629a0a-59e3-2ce5-1f36-1100082585c3/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 803.646412] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac90919-7db2-4101-bede-f4d6358b1ab3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.654926] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52629a0a-59e3-2ce5-1f36-1100082585c3/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 803.655180] env[68674]: ERROR oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52629a0a-59e3-2ce5-1f36-1100082585c3/disk-0.vmdk due to incomplete transfer. 
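
The rw_handles entries above read the HttpNfcLease state and then abort the lease because the VMDK transfer did not complete. A hedged sketch of that decision is shown below using generic oslo.vmware calls rather than the rw_handles internals; the `session` and `lease` objects are assumed to already exist, and `finish_lease` is a hypothetical helper name.

```python
# Hedged sketch: complete or abort an HttpNfcLease depending on whether the
# VMDK export finished. This mirrors, but is not, the rw_handles logic logged
# above; `session` and `lease` are assumed to exist already.
from oslo_vmware import vim_util


def finish_lease(session, lease, transfer_complete):
    # Read the current lease state ('initializing', 'ready', 'done', 'error').
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready' and transfer_complete:
        # Normal path: tell vCenter the export finished.
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
    else:
        # Incomplete transfer: abort the lease, as the ERROR line above shows.
        session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)
```
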
[ 803.655482] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-28222254-19f0-4403-827f-b36ffc4b6725 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.657942] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 803.660731] env[68674]: DEBUG nova.compute.manager [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.662313] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb224056-9272-46c6-9a8a-4155cab12aa6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.666820] env[68674]: DEBUG oslo_vmware.rw_handles [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52629a0a-59e3-2ce5-1f36-1100082585c3/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 803.667196] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Uploaded image 819147a7-c618-43d3-9313-86c442b2d2a3 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 803.669076] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 803.670757] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-174d11cd-bd90-433c-bcde-5fe336af6426 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.682422] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 803.682703] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.682834] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 803.683054] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.683213] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 803.683362] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 803.683569] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 803.683729] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 803.683925] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 803.684119] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 803.684295] env[68674]: DEBUG nova.virt.hardware [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 803.685130] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5234b68a-ef4b-4791-af53-8e6dea7123c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.688971] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 803.688971] env[68674]: value = "task-3240040" [ 803.688971] env[68674]: _type = "Task" [ 803.688971] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.699036] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6313bea6-1599-4845-92c8-72e1ccfeaaa4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.709028] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084473} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.709681] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240040, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.710371] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.710999] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684ffcea-21b6-407a-93f1-37935a135353 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.729921] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 803.731047] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ff20beaa-5e72-4a8e-b20d-3e5035c13883 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.752064] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035/c4fd04a7-2b11-4c4b-84d1-53edc1e3f035.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.752754] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad3711da-4a0d-42bb-af4c-71770fa9715b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.769429] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.771524] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 803.771524] env[68674]: value = "task-3240041" [ 803.771524] env[68674]: _type = "Task" [ 803.771524] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.772037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.082s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.772297] env[68674]: DEBUG nova.objects.instance [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lazy-loading 'resources' on Instance uuid 0e3c27fe-a2d9-45dc-9559-a678f90a6fef {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.778952] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 803.778952] env[68674]: value = "task-3240042" [ 803.778952] env[68674]: _type = "Task" [ 803.778952] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.788236] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240041, 'name': CloneVM_Task} progress is 12%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.792750] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.796227] env[68674]: INFO nova.scheduler.client.report [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted allocations for instance 1699f556-d451-40e3-a213-7edb753b03f1 [ 803.913736] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125552} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.914403] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.914403] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.914488] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.961295] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240038, 'name': CreateVM_Task, 'duration_secs': 0.367989} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.961547] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.962381] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.962597] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.962998] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 803.963297] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23742022-0fa9-4a7d-a034-af03a8be9dee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.968657] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 803.968657] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f72bdc-fdd9-6417-4636-c3692281a43d" [ 803.968657] env[68674]: _type = "Task" [ 803.968657] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.977272] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f72bdc-fdd9-6417-4636-c3692281a43d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.084014] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240039, 'name': ReconfigVM_Task, 'duration_secs': 0.320878} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.084484] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.089470] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e19fb0-87fc-4dc3-b2c3-9e0f9bc69d31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.097204] env[68674]: DEBUG nova.compute.manager [req-765f9d9a-e92e-41a8-bb8d-b906a18c1edc req-d3a00f4b-5299-474f-b938-402dd5dd1ff3 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Received event network-vif-plugged-9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 804.097435] env[68674]: DEBUG oslo_concurrency.lockutils [req-765f9d9a-e92e-41a8-bb8d-b906a18c1edc req-d3a00f4b-5299-474f-b938-402dd5dd1ff3 service nova] Acquiring lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.097926] env[68674]: DEBUG oslo_concurrency.lockutils [req-765f9d9a-e92e-41a8-bb8d-b906a18c1edc req-d3a00f4b-5299-474f-b938-402dd5dd1ff3 service nova] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.098150] env[68674]: DEBUG oslo_concurrency.lockutils [req-765f9d9a-e92e-41a8-bb8d-b906a18c1edc req-d3a00f4b-5299-474f-b938-402dd5dd1ff3 service nova] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.098337] env[68674]: DEBUG nova.compute.manager [req-765f9d9a-e92e-41a8-bb8d-b906a18c1edc req-d3a00f4b-5299-474f-b938-402dd5dd1ff3 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] No waiting events 
found dispatching network-vif-plugged-9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 804.098510] env[68674]: WARNING nova.compute.manager [req-765f9d9a-e92e-41a8-bb8d-b906a18c1edc req-d3a00f4b-5299-474f-b938-402dd5dd1ff3 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Received unexpected event network-vif-plugged-9f0aa506-1438-47ac-871c-632df3f943bf for instance with vm_state building and task_state spawning. [ 804.141493] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8295a56d-9f7a-4d23-b210-e0e1e633296c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.157986] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 804.157986] env[68674]: value = "task-3240043" [ 804.157986] env[68674]: _type = "Task" [ 804.157986] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.170178] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240043, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.178477] env[68674]: INFO nova.compute.manager [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] instance snapshotting [ 804.179250] env[68674]: DEBUG nova.objects.instance [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'flavor' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 804.184015] env[68674]: DEBUG nova.network.neutron [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updated VIF entry in instance network info cache for port 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.184773] env[68674]: DEBUG nova.network.neutron [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [{"id": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "address": "fa:16:3e:55:82:ec", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a9b5c9d-f8", "ovs_interfaceid": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.198373] env[68674]: DEBUG nova.network.neutron [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Successfully updated port: 9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 804.205335] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240040, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.290030] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240041, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.296300] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240042, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.305714] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8d43ef9d-ed2e-43da-a7c8-d1a410226ecf tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "1699f556-d451-40e3-a213-7edb753b03f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.843s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.483695] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f72bdc-fdd9-6417-4636-c3692281a43d, 'name': SearchDatastore_Task, 'duration_secs': 0.015502} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.484059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.485028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.485028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.485028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.485028] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.485338] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-848ee762-fe56-4aa7-ae0b-2a0803b90d95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.497851] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.497851] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.498708] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2ca3b18-06e4-48ff-b4c8-d0f6367120fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.509027] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 804.509027] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c49f76-2556-efed-c5b7-a9024ec04536" [ 804.509027] env[68674]: _type = "Task" [ 804.509027] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.517167] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c49f76-2556-efed-c5b7-a9024ec04536, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.670485] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240043, 'name': ReconfigVM_Task, 'duration_secs': 0.156264} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.670879] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.671175] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37b703a2-8260-4f19-ad43-d8bd1fbf9b18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.677913] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 804.677913] env[68674]: value = "task-3240044" [ 804.677913] env[68674]: _type = "Task" [ 804.677913] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.684612] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed4f330-e62d-4e4b-8c4d-efd852441cb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.689778] env[68674]: DEBUG oslo_concurrency.lockutils [req-0144c1e2-6b6d-4f7a-89cd-7d687666ba0b req-29489fdf-5730-402e-bde0-de3035b01939 service nova] Releasing lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.694187] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240044, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.712914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.713164] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.713413] env[68674]: DEBUG nova.network.neutron [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.719336] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8704ab8-ad9b-40b9-b5fa-f4db818288a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.725617] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240040, 'name': Destroy_Task, 'duration_secs': 0.827619} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.726823] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Destroyed the VM [ 804.727088] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 804.727412] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e0bb2d6b-836b-4f63-8afd-8bad876c0cb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.735777] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0b8110-2881-4b94-81e5-c4d5188faea2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.740310] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 804.740310] env[68674]: value = "task-3240045" [ 804.740310] env[68674]: _type = "Task" [ 804.740310] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.749605] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a94159-d739-46c7-aa1a-ab107bf7407b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.755329] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240045, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.787378] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adb389e-9304-49dd-b995-f4a577d51bf3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.805242] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0034a4-b8dc-4401-98be-1fcabed82e89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.810183] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240042, 'name': ReconfigVM_Task, 'duration_secs': 0.910502} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.810384] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240041, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.810886] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Reconfigured VM instance instance-00000039 to attach disk [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035/c4fd04a7-2b11-4c4b-84d1-53edc1e3f035.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.811214] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68674) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 804.812301] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-aebcc32e-5c08-4c6d-b7d8-12d0fbcba6f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.822892] env[68674]: DEBUG nova.compute.provider_tree [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.829781] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 804.829781] env[68674]: value = "task-3240046" [ 804.829781] env[68674]: _type = "Task" [ 804.829781] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.839218] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240046, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.946362] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.946669] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.946861] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.947134] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.947389] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.947597] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.947859] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.948090] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 804.948332] env[68674]: DEBUG 
nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.948550] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.948784] env[68674]: DEBUG nova.virt.hardware [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.949712] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16a6860-7a5c-442c-acef-0fc7d95763e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.958667] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd73001d-8014-4192-a5f9-34a4c7c71fbd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.975364] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 804.981030] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 804.981332] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 804.981576] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cbd2658-c4dc-4c74-b8ce-124821f16ed4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.999766] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 804.999766] env[68674]: value = "task-3240047" [ 804.999766] env[68674]: _type = "Task" [ 804.999766] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.009295] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240047, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.017565] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c49f76-2556-efed-c5b7-a9024ec04536, 'name': SearchDatastore_Task, 'duration_secs': 0.019804} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.018348] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2afaee5-93cb-46c1-9eb5-447ba99e594a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.025034] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 805.025034] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ebbab9-1221-1c31-517d-05928ee8f2d0" [ 805.025034] env[68674]: _type = "Task" [ 805.025034] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.034613] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ebbab9-1221-1c31-517d-05928ee8f2d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.188115] env[68674]: DEBUG oslo_vmware.api [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240044, 'name': PowerOnVM_Task, 'duration_secs': 0.395403} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.188544] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.191730] env[68674]: DEBUG nova.compute.manager [None req-c054ec11-0270-47de-ba0c-98d9904116dc tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.192615] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd42e2e0-5212-42b7-a8e9-41283d20212f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.241469] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 805.242244] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a0e5a4b7-c620-47e0-a8b6-2fe741c6155b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.250728] env[68674]: DEBUG nova.network.neutron [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.257913] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 805.257913] env[68674]: value = "task-3240048" [ 805.257913] env[68674]: _type = "Task" [ 805.257913] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.258552] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240045, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.267240] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240048, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.293589] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240041, 'name': CloneVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.329545] env[68674]: DEBUG nova.scheduler.client.report [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.341380] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240046, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.054025} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.343908] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68674) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 805.344686] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c11b351-ed65-47ba-9bb1-278a81861b3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.369939] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035/ephemeral_0.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.370498] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8984129-c719-4480-9a93-b91971ebf0d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.391052] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 805.391052] env[68674]: value = "task-3240049" [ 805.391052] env[68674]: _type = "Task" [ 805.391052] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.398684] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240049, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.424461] env[68674]: DEBUG nova.network.neutron [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [{"id": "9f0aa506-1438-47ac-871c-632df3f943bf", "address": "fa:16:3e:e9:fe:db", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0aa506-14", "ovs_interfaceid": "9f0aa506-1438-47ac-871c-632df3f943bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.517100] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240047, 'name': CreateVM_Task, 'duration_secs': 0.292567} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.517265] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 805.518028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.518028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.518186] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 805.519048] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06bc3f12-aca1-40c3-bb3e-8eb0f52e0038 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.524041] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 805.524041] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525528e3-4fd5-d53e-4acf-f2b22ad7b8e7" [ 805.524041] env[68674]: _type = "Task" [ 805.524041] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.534537] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525528e3-4fd5-d53e-4acf-f2b22ad7b8e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.537473] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ebbab9-1221-1c31-517d-05928ee8f2d0, 'name': SearchDatastore_Task, 'duration_secs': 0.012526} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.537713] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.537963] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7329e503-d87d-4e15-b181-65ac6e376781/7329e503-d87d-4e15-b181-65ac6e376781.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 805.538225] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-190466fd-8fa3-4378-893d-2705badd4814 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.545741] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 805.545741] env[68674]: value = "task-3240050" [ 805.545741] env[68674]: _type = "Task" [ 805.545741] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.552518] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.751917] env[68674]: DEBUG oslo_vmware.api [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240045, 'name': RemoveSnapshot_Task, 'duration_secs': 0.593788} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.752234] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 805.753030] env[68674]: INFO nova.compute.manager [None req-af6707b8-b887-47b2-ab5a-a02608cefa6d tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Took 18.03 seconds to snapshot the instance on the hypervisor. 
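The CopyVirtualDisk sequence above ("Copying Virtual Disk ...", "Invoking VirtualDiskManager.CopyVirtualDisk_Task", "Waiting for the task ... to complete", the repeated "progress is N%" polling, then "completed successfully") is the standard oslo.vmware invoke-and-wait pattern that recurs throughout this log for CreateVM_Task, CloneVM_Task, ReconfigVM_Task and the other vSphere tasks. A minimal sketch of that pattern, outside of Nova and with hypothetical datastore paths and object references, might look like this (Python):

    # Illustrative sketch only -- not Nova's implementation. The session,
    # datacenter reference and datastore paths below are assumptions.
    from oslo_vmware import api

    def copy_disk(session, dc_ref, src_path, dst_path):
        # CopyVirtualDisk_Task returns a Task managed-object reference;
        # invoke_api() issues the SOAP call (the "Invoking
        # VirtualDiskManager.CopyVirtualDisk_Task" entries above).
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=src_path, sourceDatacenter=dc_ref,
            destName=dst_path, destDatacenter=dc_ref)
        # wait_for_task() polls the task state -- the "_poll_task ...
        # progress is N%" lines -- and raises an oslo.vmware exception
        # if the task finishes in an error state.
        return session.wait_for_task(task)

    # Example wiring (values are placeholders):
    # session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)
    # copy_disk(session, dc_ref,
    #           '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    #           '[datastore1] <instance-uuid>/<instance-uuid>.vmdk')
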
[ 805.770154] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240048, 'name': CreateSnapshot_Task, 'duration_secs': 0.484274} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.770448] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 805.771728] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc9c6a2-54c2-48ab-b946-e4d50416c46e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.793993] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240041, 'name': CloneVM_Task, 'duration_secs': 1.632451} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.794328] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Created linked-clone VM from snapshot [ 805.795096] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc82835-6e19-4fdd-9f2c-7dec65a38537 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.804666] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Uploading image bd367444-bce1-48c0-91d9-30bd5d973e39 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 805.830555] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 805.830555] env[68674]: value = "vm-647555" [ 805.830555] env[68674]: _type = "VirtualMachine" [ 805.830555] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 805.831065] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-00cc3175-606e-47c7-8b58-d4d398d825e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.837120] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.841112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.161s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.842992] env[68674]: INFO nova.compute.claims [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.848036] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease: (returnval){ [ 805.848036] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ec416c-6365-abff-66cf-0b4d6f126d24" [ 805.848036] env[68674]: _type = "HttpNfcLease" [ 805.848036] env[68674]: } obtained for exporting VM: (result){ [ 805.848036] env[68674]: value = "vm-647555" [ 805.848036] env[68674]: _type = "VirtualMachine" [ 805.848036] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 805.848036] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the lease: (returnval){ [ 805.848036] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ec416c-6365-abff-66cf-0b4d6f126d24" [ 805.848036] env[68674]: _type = "HttpNfcLease" [ 805.848036] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 805.855667] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 805.855667] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ec416c-6365-abff-66cf-0b4d6f126d24" [ 805.855667] env[68674]: _type = "HttpNfcLease" [ 805.855667] env[68674]: } is ready. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 805.856012] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 805.856012] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ec416c-6365-abff-66cf-0b4d6f126d24" [ 805.856012] env[68674]: _type = "HttpNfcLease" [ 805.856012] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 805.857242] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56de40d1-272c-4514-a5a6-f00f26437972 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.867707] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a147bd-bf8a-e67c-0343-b0f68d8aa40d/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 805.868076] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a147bd-bf8a-e67c-0343-b0f68d8aa40d/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 805.938369] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.938885] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Instance network_info: |[{"id": "9f0aa506-1438-47ac-871c-632df3f943bf", "address": "fa:16:3e:e9:fe:db", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0aa506-14", "ovs_interfaceid": "9f0aa506-1438-47ac-871c-632df3f943bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 805.940175] env[68674]: INFO nova.scheduler.client.report [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted allocations for instance 0e3c27fe-a2d9-45dc-9559-a678f90a6fef [ 805.946142] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:fe:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f0aa506-1438-47ac-871c-632df3f943bf', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 805.955662] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 805.963504] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 805.963504] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fa065b1-7345-458b-9ef9-7b2a0fc4ec0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.984718] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240049, 'name': ReconfigVM_Task, 'duration_secs': 0.316989} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.985591] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Reconfigured VM instance instance-00000039 to attach disk [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035/ephemeral_0.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.987475] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e13cb42-f8e4-4162-8581-33e914880cf3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.990149] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 805.990149] env[68674]: value = "task-3240052" [ 805.990149] env[68674]: _type = "Task" [ 805.990149] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.997897] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 805.997897] env[68674]: value = "task-3240053" [ 805.997897] env[68674]: _type = "Task" [ 805.997897] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.006133] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240052, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.007436] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3d7a581d-3c34-45c0-868b-9b7e9973f113 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.018074] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240053, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.033960] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525528e3-4fd5-d53e-4acf-f2b22ad7b8e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009494} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.034368] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.034620] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 806.034893] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.035154] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.035903] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 806.035903] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1aa424c-667c-4b2f-a935-81498b2e1bf0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.044260] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 806.044625] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 806.045549] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab8cabbc-303c-4534-a7b1-73573f20dca1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.060642] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 806.060642] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f06d3-f7e4-44a6-6ca4-54a3d2f97a81" [ 806.060642] env[68674]: _type = "Task" [ 806.060642] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.069089] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.069089] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7329e503-d87d-4e15-b181-65ac6e376781/7329e503-d87d-4e15-b181-65ac6e376781.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.069089] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.069296] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcdcfed4-0b45-4bbc-b18b-f6be6d107c60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.074632] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f06d3-f7e4-44a6-6ca4-54a3d2f97a81, 'name': SearchDatastore_Task, 'duration_secs': 0.008237} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.075809] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98462ee4-fda5-40f4-bae7-9ae947cf0054 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.079752] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 806.079752] env[68674]: value = "task-3240054" [ 806.079752] env[68674]: _type = "Task" [ 806.079752] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.084500] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 806.084500] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a6383b-e559-9101-b6a2-1df2fb8005b1" [ 806.084500] env[68674]: _type = "Task" [ 806.084500] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.091276] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.096176] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a6383b-e559-9101-b6a2-1df2fb8005b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.139878] env[68674]: DEBUG nova.compute.manager [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Received event network-changed-9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.140132] env[68674]: DEBUG nova.compute.manager [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Refreshing instance network info cache due to event network-changed-9f0aa506-1438-47ac-871c-632df3f943bf. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 806.140371] env[68674]: DEBUG oslo_concurrency.lockutils [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] Acquiring lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.140537] env[68674]: DEBUG oslo_concurrency.lockutils [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] Acquired lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.140709] env[68674]: DEBUG nova.network.neutron [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Refreshing network info cache for port 9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.292021] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 806.292021] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bd1d78e7-c498-4a7f-a98a-2b501cc27459 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.301099] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 806.301099] env[68674]: value = "task-3240055" [ 806.301099] env[68674]: _type = "Task" [ 806.301099] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.310665] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240055, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.466975] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5aa7de98-f413-4bc1-9643-9089b02545e8 tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "0e3c27fe-a2d9-45dc-9559-a678f90a6fef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.765s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.501850] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240052, 'name': CreateVM_Task, 'duration_secs': 0.406872} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.504678] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 806.506032] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.507534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.508056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 806.508815] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-141f0fb5-0c0e-4372-acec-fa0680fe5f9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.514135] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240053, 'name': Rename_Task, 'duration_secs': 0.227596} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.515205] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.515456] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-609fe6ee-a0f6-45a3-870b-2bf5f1abdb7f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.518603] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 806.518603] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5203f3cd-62a4-1448-9f64-f39a6aa0679b" [ 806.518603] env[68674]: _type = "Task" [ 806.518603] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.526674] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 806.526674] env[68674]: value = "task-3240056" [ 806.526674] env[68674]: _type = "Task" [ 806.526674] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.540870] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5203f3cd-62a4-1448-9f64-f39a6aa0679b, 'name': SearchDatastore_Task, 'duration_secs': 0.012675} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.541986] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.542428] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 806.542889] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.546750] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240056, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.593223] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.594638] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.595022] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d499297-266c-44a7-994e-e28f019d70a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.602329] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a6383b-e559-9101-b6a2-1df2fb8005b1, 'name': SearchDatastore_Task, 'duration_secs': 0.018545} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.603259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.603259] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.603510] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.603510] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 806.603885] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd49969f-60da-4f7e-93fb-52c3a07c5aa7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.615598] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8619beb6-1ba9-4efa-83fd-14dc6c43de3d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.626225] env[68674]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 7329e503-d87d-4e15-b181-65ac6e376781/7329e503-d87d-4e15-b181-65ac6e376781.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.627362] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26f2ae1b-13e6-490a-8078-586843809b25 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.645896] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 806.645896] env[68674]: value = "task-3240057" [ 806.645896] env[68674]: _type = "Task" [ 806.645896] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.652560] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 806.652560] env[68674]: value = "task-3240058" [ 806.652560] env[68674]: _type = "Task" [ 806.652560] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.652848] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 806.653701] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 806.654526] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76064eef-404f-4c2f-a0d8-eb529aab3701 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.664735] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240057, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.668196] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 806.668196] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a2defa-9db8-a7cf-091f-6fce012fbf3c" [ 806.668196] env[68674]: _type = "Task" [ 806.668196] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.674717] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240058, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.681827] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a2defa-9db8-a7cf-091f-6fce012fbf3c, 'name': SearchDatastore_Task, 'duration_secs': 0.01042} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.683243] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01159682-4555-47c3-b6ae-378889656777 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.692932] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 806.692932] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52176189-c861-0a2b-ebef-db62fa660c7c" [ 806.692932] env[68674]: _type = "Task" [ 806.692932] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.702617] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52176189-c861-0a2b-ebef-db62fa660c7c, 'name': SearchDatastore_Task, 'duration_secs': 0.010375} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.703111] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.703578] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188/e1283f87-5bdb-4d4e-a1c5-f3b1c9180188.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.703821] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a464c0de-9bda-42b5-a766-ebca787a0900 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.712400] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 806.712400] env[68674]: value = "task-3240059" [ 806.712400] env[68674]: _type = "Task" [ 806.712400] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.720819] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.815960] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240055, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.943600] env[68674]: DEBUG nova.network.neutron [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updated VIF entry in instance network info cache for port 9f0aa506-1438-47ac-871c-632df3f943bf. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 806.944393] env[68674]: DEBUG nova.network.neutron [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [{"id": "9f0aa506-1438-47ac-871c-632df3f943bf", "address": "fa:16:3e:e9:fe:db", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0aa506-14", "ovs_interfaceid": "9f0aa506-1438-47ac-871c-632df3f943bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.046301] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240056, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.172018] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240057, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.187867] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240058, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.224660] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240059, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.302387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.302387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.315570] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240055, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.341687] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "3a0a7950-af31-4a20-a19d-44fbce8735a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.342168] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.448771] env[68674]: DEBUG oslo_concurrency.lockutils [req-1e4d0877-8cea-413d-a5a7-e33e90536134 req-010daf30-e05a-4ca0-95f5-daa2878b7f5d service nova] Releasing lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.451133] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a36cec-d3a7-45b1-8032-b6d91bc13127 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.497444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9735067-850c-4cdd-9253-7160be0b1e4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.511528] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5912f4f-5049-4127-b537-9a428e58cfab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.562812] env[68674]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e573f187-6878-484b-bad0-daa7ea43d1c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.567864] env[68674]: DEBUG oslo_vmware.api [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240056, 'name': PowerOnVM_Task, 'duration_secs': 0.739394} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.568252] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.568511] env[68674]: INFO nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Took 10.93 seconds to spawn the instance on the hypervisor. [ 807.568714] env[68674]: DEBUG nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.571026] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb15957-44ac-4cae-8aab-7c6656250899 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.584729] env[68674]: DEBUG nova.compute.provider_tree [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.658512] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590548} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.663123] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 807.663389] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 807.664066] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3799f36d-b3ba-49cc-889a-acf1c9aabd03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.672533] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240058, 'name': ReconfigVM_Task, 'duration_secs': 0.585722} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.673620] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 7329e503-d87d-4e15-b181-65ac6e376781/7329e503-d87d-4e15-b181-65ac6e376781.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.674445] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 807.674445] env[68674]: value = "task-3240060" [ 807.674445] env[68674]: _type = "Task" [ 807.674445] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.674684] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5a128d6-1c41-49a6-98f8-5a52d13b0895 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.686601] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240060, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.688361] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 807.688361] env[68674]: value = "task-3240061" [ 807.688361] env[68674]: _type = "Task" [ 807.688361] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.699788] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240061, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.724240] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.863954} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.724623] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188/e1283f87-5bdb-4d4e-a1c5-f3b1c9180188.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 807.724782] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 807.725106] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a685509-c903-4c47-8269-246f6747e45e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.731440] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 807.731440] env[68674]: value = "task-3240062" [ 807.731440] env[68674]: _type = "Task" [ 807.731440] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.740810] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240062, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.804167] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 807.815615] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240055, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.864518] env[68674]: INFO nova.compute.manager [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Rescuing [ 807.864518] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.864764] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.864928] env[68674]: DEBUG nova.network.neutron [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 808.089047] env[68674]: DEBUG nova.scheduler.client.report [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.105196] env[68674]: INFO nova.compute.manager [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Took 56.99 seconds to build instance. 
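The repeated "Waiting for the task ... to complete" / "Task: {...} progress is N%" / "completed successfully" entries above come from oslo.vmware polling each vCenter task (CopyVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, ...) until it reaches a terminal state. The snippet below is a minimal, self-contained sketch of that poll-until-done pattern only, not Nova's or oslo.vmware's actual implementation; `poll_fn` is a hypothetical stand-in for the real PropertyCollector call.

# Minimal sketch of the poll loop behind the "Waiting for the task ..."
# and "progress is N%" entries above. `poll_fn` is a hypothetical callable
# returning (state, progress) for a task moref value; the real code asks
# the vSphere PropertyCollector instead.

import time


def wait_for_task(task_value, poll_fn, interval=0.5):
    """Poll `task_value` until it reaches a terminal state."""
    while True:
        state, progress = poll_fn(task_value)
        if state == "success":
            return progress
        if state == "error":
            raise RuntimeError("task %s failed" % task_value)
        # 'queued' or 'running': report progress and poll again, as in the log.
        print("Task %s progress is %s%%" % (task_value, progress))
        time.sleep(interval)


if __name__ == "__main__":
    # Fake poller that finishes after three polls, for demonstration only.
    states = iter([("running", 33), ("running", 89), ("success", 100)])
    print(wait_for_task("task-3240056", lambda _t: next(states), interval=0))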
[ 808.186075] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240060, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07242} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.186412] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 808.187218] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4cf785-dd88-45de-8cae-90e1cf4f3947 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.199609] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240061, 'name': Rename_Task, 'duration_secs': 0.289854} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.206514] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 808.215581] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 808.215867] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76b3042d-7ff9-4e85-851f-8d7bc9daaf49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.217593] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e83f8e6-f521-4054-8628-7c63a4f32402 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.237040] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 808.237040] env[68674]: value = "task-3240064" [ 808.237040] env[68674]: _type = "Task" [ 808.237040] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.238282] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 808.238282] env[68674]: value = "task-3240063" [ 808.238282] env[68674]: _type = "Task" [ 808.238282] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.243941] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069731} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.247006] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 808.248077] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245eef65-8dd9-4239-903f-50e59f96205f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.253780] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240064, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.259219] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240063, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.276976] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188/e1283f87-5bdb-4d4e-a1c5-f3b1c9180188.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 808.277436] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81ac5c9f-1c90-4f57-909e-e8239dc1abb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.295991] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 808.295991] env[68674]: value = "task-3240065" [ 808.295991] env[68674]: _type = "Task" [ 808.295991] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.304016] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240065, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.319703] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240055, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.335654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.593852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.594430] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 808.597323] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.217s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.597543] env[68674]: DEBUG nova.objects.instance [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lazy-loading 'resources' on Instance uuid 3463e09e-dc2f-432c-9eff-8192c2616240 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.606760] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b26ca003-5133-4b47-8344-92e63cb4fc63 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.829s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.618185] env[68674]: DEBUG nova.network.neutron [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Updating instance_info_cache with network_info: [{"id": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "address": "fa:16:3e:21:74:75", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb762aca-5f", "ovs_interfaceid": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.747279] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240064, 'name': ReconfigVM_Task, 'duration_secs': 0.319295} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.751214] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f/1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.751214] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91b38ba7-923d-4e3c-9138-6a132ce46b17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.757286] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240063, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.758498] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 808.758498] env[68674]: value = "task-3240066" [ 808.758498] env[68674]: _type = "Task" [ 808.758498] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.768676] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240066, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.805782] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240065, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.816836] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240055, 'name': CloneVM_Task, 'duration_secs': 2.155448} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.817191] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Created linked-clone VM from snapshot [ 808.818022] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8eb47d-b91d-4f8a-8558-b8f6e40892d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.825597] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Uploading image 496b987e-faad-4d33-9ebe-e3f02bf8e7d6 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 808.850921] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 808.850921] env[68674]: value = "vm-647559" [ 808.850921] env[68674]: _type = "VirtualMachine" [ 808.850921] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 808.851318] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-02fe6f04-a407-4d82-879f-af03f92c507e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.858759] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease: (returnval){ [ 808.858759] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ad20f-4a1c-534e-988b-0c133ddf942e" [ 808.858759] env[68674]: _type = "HttpNfcLease" [ 808.858759] env[68674]: } obtained for exporting VM: (result){ [ 808.858759] env[68674]: value = "vm-647559" [ 808.858759] env[68674]: _type = "VirtualMachine" [ 808.858759] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 808.859067] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the lease: (returnval){ [ 808.859067] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ad20f-4a1c-534e-988b-0c133ddf942e" [ 808.859067] env[68674]: _type = "HttpNfcLease" [ 808.859067] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 808.866082] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 808.866082] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ad20f-4a1c-534e-988b-0c133ddf942e" [ 808.866082] env[68674]: _type = "HttpNfcLease" [ 808.866082] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 809.052674] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.052674] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.052674] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.052674] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.052674] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.055740] env[68674]: INFO nova.compute.manager [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Terminating instance [ 809.100552] env[68674]: DEBUG nova.compute.utils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 809.106374] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 809.106374] env[68674]: DEBUG nova.network.neutron [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.109397] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.120556] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.167766] env[68674]: DEBUG nova.policy [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21bef802c4cc456986781f0ec9ce00e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5555b26beb384c7680587cfdb67d9a10', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 809.261739] env[68674]: DEBUG oslo_vmware.api [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240063, 'name': PowerOnVM_Task, 'duration_secs': 0.896051} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.266982] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 809.267343] env[68674]: INFO nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Took 8.30 seconds to spawn the instance on the hypervisor. 
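The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited N s" and 'Lock ... "released" ... :: held N s' triples throughout this trace are emitted by oslo.concurrency's named locks. A minimal sketch of that usage follows, assuming oslo.concurrency is installed; the lock names and guarded bodies are illustrative only, not Nova's actual methods.

# Sketch of the named-lock usage behind the lockutils entries above.
# Requires oslo.concurrency; lock names and function bodies are examples.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Runs with the named lock held, mirroring the
    # ResourceTracker.instance_claim locking seen in the trace.
    return "claimed %s" % instance_uuid


def refresh_cache(instance_uuid):
    # Equivalent context-manager form, as used around the
    # "refresh_cache-<uuid>" locks in the trace.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        return "refreshed %s" % instance_uuid


if __name__ == "__main__":
    print(claim_resources("f3e7cacd-20d3-4dbe-89b0-80d89173069a"))
    print(refresh_cache("e1283f87-5bdb-4d4e-a1c5-f3b1c9180188"))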
[ 809.267609] env[68674]: DEBUG nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.271091] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a12045-ee86-43cc-9ce1-0a8ea9a80a01 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.278819] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240066, 'name': Rename_Task, 'duration_secs': 0.128132} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.280562] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 809.283544] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3dde0ac0-50ee-4a33-804f-415833318a46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.291433] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 809.291433] env[68674]: value = "task-3240068" [ 809.291433] env[68674]: _type = "Task" [ 809.291433] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.300096] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240068, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.310677] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240065, 'name': ReconfigVM_Task, 'duration_secs': 0.69145} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.311096] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Reconfigured VM instance instance-0000003b to attach disk [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188/e1283f87-5bdb-4d4e-a1c5-f3b1c9180188.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 809.311872] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb758fad-417f-490f-a545-409bb23c8924 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.317543] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 809.317543] env[68674]: value = "task-3240069" [ 809.317543] env[68674]: _type = "Task" [ 809.317543] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.330259] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240069, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.369548] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 809.369548] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ad20f-4a1c-534e-988b-0c133ddf942e" [ 809.369548] env[68674]: _type = "HttpNfcLease" [ 809.369548] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 809.370444] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 809.370444] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523ad20f-4a1c-534e-988b-0c133ddf942e" [ 809.370444] env[68674]: _type = "HttpNfcLease" [ 809.370444] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 809.370888] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0311cf07-ef3f-41cf-a97b-f90305e99213 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.383137] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5ace5-6b18-d614-9eda-06f8471e5b81/disk-0.vmdk from lease info. 
{{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 809.383137] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5ace5-6b18-d614-9eda-06f8471e5b81/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 809.485956] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4cec0e5e-1c9d-4d25-aebe-848adbd6c839 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.562382] env[68674]: DEBUG nova.compute.manager [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 809.562382] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.563292] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad61f11f-202e-44ff-a570-6aece3544e68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.574807] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.577613] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a1fda5d-86da-4167-8eb1-7a55fdd7c66c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.584415] env[68674]: DEBUG oslo_vmware.api [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 809.584415] env[68674]: value = "task-3240070" [ 809.584415] env[68674]: _type = "Task" [ 809.584415] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.594654] env[68674]: DEBUG oslo_vmware.api [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240070, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.608567] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 809.621036] env[68674]: DEBUG nova.network.neutron [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Successfully created port: a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.646789] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.771579] env[68674]: DEBUG nova.compute.manager [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Received event network-changed-4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 809.771803] env[68674]: DEBUG nova.compute.manager [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Refreshing instance network info cache due to event network-changed-4b49219f-9f62-4839-b5a2-eb1116da215c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 809.772572] env[68674]: DEBUG oslo_concurrency.lockutils [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] Acquiring lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.772734] env[68674]: DEBUG oslo_concurrency.lockutils [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] Acquired lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.772901] env[68674]: DEBUG nova.network.neutron [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Refreshing network info cache for port 4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 809.799392] env[68674]: INFO nova.compute.manager [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Took 53.94 seconds to build instance. 
[ 809.808762] env[68674]: DEBUG oslo_vmware.api [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240068, 'name': PowerOnVM_Task, 'duration_secs': 0.468911} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.809068] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 809.809282] env[68674]: DEBUG nova.compute.manager [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.810580] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8941562f-7c2b-462f-bfd1-16f8b83b0452 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.834695] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240069, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.866647] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8754c9e-6d80-4012-aeb3-6467d164d304 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.874887] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149ab103-608f-4a91-baf1-93b1eab6929c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.911480] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5cca33-332f-41a1-98af-11c8f83d8067 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.920286] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ec0a43-664a-49b2-a68a-bc740c563123 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.938558] env[68674]: DEBUG nova.compute.provider_tree [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.097615] env[68674]: DEBUG oslo_vmware.api [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240070, 'name': PowerOffVM_Task, 'duration_secs': 0.304801} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.098156] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.098496] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.098895] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57b1a87f-7eca-4127-8f41-fb6ebe61066c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.190223] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 810.190527] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 810.190770] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Deleting the datastore file [datastore2] 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.192152] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-393570a9-bab7-477e-ac2e-0b66c5e20236 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.200094] env[68674]: DEBUG oslo_vmware.api [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 810.200094] env[68674]: value = "task-3240072" [ 810.200094] env[68674]: _type = "Task" [ 810.200094] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.211393] env[68674]: DEBUG oslo_vmware.api [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.302830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ed5da5a-5338-42c2-9df8-4b7b953bdbda tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.828s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.337635] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240069, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.344024] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.443411] env[68674]: DEBUG nova.scheduler.client.report [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.585476] env[68674]: DEBUG nova.network.neutron [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updated VIF entry in instance network info cache for port 4b49219f-9f62-4839-b5a2-eb1116da215c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.585985] env[68674]: DEBUG nova.network.neutron [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updating instance_info_cache with network_info: [{"id": "4b49219f-9f62-4839-b5a2-eb1116da215c", "address": "fa:16:3e:0d:4d:8a", "network": {"id": "f11cd326-6319-47eb-833d-5282731628e9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-287739122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e05a97545e94e8a9be8f382457d79b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f1b507ed-cd2d-4c09-9d96-c47bde6a7774", "external-id": "nsx-vlan-transportzone-980", "segmentation_id": 980, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b49219f-9f", "ovs_interfaceid": "4b49219f-9f62-4839-b5a2-eb1116da215c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.620319] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 810.678989] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 810.679503] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e995b34f-e168-4c3b-9ee6-e9c5a6f44a97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.687532] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 810.687532] env[68674]: value = "task-3240073" [ 810.687532] env[68674]: _type = "Task" [ 810.687532] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.697866] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240073, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.709446] env[68674]: DEBUG oslo_vmware.api [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255169} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.709974] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.709974] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 810.709974] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.709974] env[68674]: INFO nova.compute.manager [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 810.711399] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.711723] env[68674]: DEBUG nova.compute.manager [-] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.711934] env[68674]: DEBUG nova.network.neutron [-] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.829816] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240069, 'name': Rename_Task, 'duration_secs': 1.219069} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.830130] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 810.830518] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09eeb55a-9300-4e54-bf7a-cb235ad8f837 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.837629] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 810.837629] env[68674]: value = "task-3240074" [ 810.837629] env[68674]: _type = "Task" [ 810.837629] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.846849] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.865374] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.865762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.866038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.866233] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.866481] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 
tempest-ServersListShow296Test-1809466911-project-member] Lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.869200] env[68674]: INFO nova.compute.manager [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Terminating instance [ 810.949154] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.352s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.957713] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.554s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.960202] env[68674]: INFO nova.compute.claims [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.986468] env[68674]: INFO nova.scheduler.client.report [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted allocations for instance 3463e09e-dc2f-432c-9eff-8192c2616240 [ 811.089719] env[68674]: DEBUG oslo_concurrency.lockutils [req-40b23022-80b4-43d4-92f3-1acf0cff6da3 req-607c1139-02fd-485e-b9d2-cc8a0559be0a service nova] Releasing lock "refresh_cache-c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.201890] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240073, 'name': PowerOffVM_Task, 'duration_secs': 0.300865} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.202542] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 811.203679] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c00d9d0-3630-4607-bedb-465a8a6cd0d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.225618] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02329f68-0514-4364-b165-b0d119d0c4c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.320698] env[68674]: DEBUG nova.network.neutron [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Successfully updated port: a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.349016] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240074, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.375751] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "refresh_cache-1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.376055] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquired lock "refresh_cache-1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.376330] env[68674]: DEBUG nova.network.neutron [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.496094] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a631a3f0-bce2-453b-a728-d0149548db73 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "3463e09e-dc2f-432c-9eff-8192c2616240" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.100s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.562912] env[68674]: DEBUG nova.network.neutron [-] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Updating instance_info_cache with network_info: [] {{(pid=68674) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.825380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "refresh_cache-5bd42044-84f5-4f48-aa97-b7cf990ed35d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.825380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired lock "refresh_cache-5bd42044-84f5-4f48-aa97-b7cf990ed35d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.825380] env[68674]: DEBUG nova.network.neutron [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.849314] env[68674]: DEBUG oslo_vmware.api [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240074, 'name': PowerOnVM_Task, 'duration_secs': 0.627335} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.849609] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 811.849831] env[68674]: INFO nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Took 8.19 seconds to spawn the instance on the hypervisor. [ 811.850024] env[68674]: DEBUG nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 811.850899] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f05062-a60f-4c58-8ef3-5be2faf45e66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.902377] env[68674]: DEBUG nova.network.neutron [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.953463] env[68674]: DEBUG nova.network.neutron [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.065948] env[68674]: INFO nova.compute.manager [-] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Took 1.35 seconds to deallocate network for instance. [ 812.211372] env[68674]: DEBUG nova.compute.manager [req-62e1a444-b7db-441c-a18f-4f99950b8f93 req-5b3930cf-7629-4648-8a42-06502bcd4eff service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Received event network-vif-plugged-a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.211710] env[68674]: DEBUG oslo_concurrency.lockutils [req-62e1a444-b7db-441c-a18f-4f99950b8f93 req-5b3930cf-7629-4648-8a42-06502bcd4eff service nova] Acquiring lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.211990] env[68674]: DEBUG oslo_concurrency.lockutils [req-62e1a444-b7db-441c-a18f-4f99950b8f93 req-5b3930cf-7629-4648-8a42-06502bcd4eff service nova] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.212149] env[68674]: DEBUG oslo_concurrency.lockutils [req-62e1a444-b7db-441c-a18f-4f99950b8f93 req-5b3930cf-7629-4648-8a42-06502bcd4eff service nova] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.212326] env[68674]: DEBUG nova.compute.manager [req-62e1a444-b7db-441c-a18f-4f99950b8f93 req-5b3930cf-7629-4648-8a42-06502bcd4eff service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] No waiting events found dispatching network-vif-plugged-a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 812.212493] env[68674]: WARNING nova.compute.manager [req-62e1a444-b7db-441c-a18f-4f99950b8f93 req-5b3930cf-7629-4648-8a42-06502bcd4eff service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Received unexpected event network-vif-plugged-a6b5f701-3612-4016-86eb-a45074df2088 for instance with vm_state building and task_state spawning. [ 812.366978] env[68674]: DEBUG nova.network.neutron [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.374472] env[68674]: INFO nova.compute.manager [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Took 54.00 seconds to build instance. [ 812.445458] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcd038c-4ba0-47a7-81dc-edfb76536edd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.456182] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46cee63-d60b-4a50-8e06-7ac6d9db7896 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.464191] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Releasing lock "refresh_cache-1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.464761] env[68674]: DEBUG nova.compute.manager [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.465047] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.466373] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8397f399-b0aa-43e4-8b54-77aca315f01b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.505747] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372f845f-40d6-4abd-9594-e565054e485a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.509088] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.512136] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9786a9c8-2c49-4198-9ed7-867c59ba58fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.523015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa83cfe0-485a-47a2-b1f1-506538a7594f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.527889] env[68674]: DEBUG oslo_vmware.api [None 
req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 812.527889] env[68674]: value = "task-3240075" [ 812.527889] env[68674]: _type = "Task" [ 812.527889] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.544009] env[68674]: DEBUG nova.compute.provider_tree [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.551092] env[68674]: DEBUG oslo_vmware.api [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.552531] env[68674]: DEBUG nova.network.neutron [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Updating instance_info_cache with network_info: [{"id": "a6b5f701-3612-4016-86eb-a45074df2088", "address": "fa:16:3e:3b:4b:ba", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6b5f701-36", "ovs_interfaceid": "a6b5f701-3612-4016-86eb-a45074df2088", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.574127] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.773468] env[68674]: DEBUG nova.compute.manager [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Received event network-vif-deleted-5624a182-6a62-46b8-b456-e3e59a2ed84e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.773753] env[68674]: DEBUG 
nova.compute.manager [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Received event network-changed-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.773946] env[68674]: DEBUG nova.compute.manager [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Refreshing instance network info cache due to event network-changed-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 812.774237] env[68674]: DEBUG oslo_concurrency.lockutils [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] Acquiring lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.774483] env[68674]: DEBUG oslo_concurrency.lockutils [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] Acquired lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.774677] env[68674]: DEBUG nova.network.neutron [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Refreshing network info cache for port 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 812.876761] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9367fefe-423c-4610-ba7b-588258f193e5 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.038s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.012709] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.013083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.013316] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.013505] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.013667] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.015936] env[68674]: INFO nova.compute.manager [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Terminating instance [ 813.040458] env[68674]: DEBUG oslo_vmware.api [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240075, 'name': PowerOffVM_Task, 'duration_secs': 0.234089} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.040737] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.040954] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.041186] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f3370d8-789c-4c76-9e88-2eb53958b979 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.048145] env[68674]: DEBUG nova.scheduler.client.report [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.055951] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 
tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Releasing lock "refresh_cache-5bd42044-84f5-4f48-aa97-b7cf990ed35d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.056401] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Instance network_info: |[{"id": "a6b5f701-3612-4016-86eb-a45074df2088", "address": "fa:16:3e:3b:4b:ba", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6b5f701-36", "ovs_interfaceid": "a6b5f701-3612-4016-86eb-a45074df2088", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 813.067893] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.068308] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.068608] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Deleting the datastore file [datastore2] 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.068945] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4bece23-b01d-4e48-a351-838983ce87ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.076741] env[68674]: DEBUG oslo_vmware.api [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for the task: (returnval){ [ 813.076741] env[68674]: value = "task-3240077" [ 813.076741] env[68674]: _type = "Task" [ 813.076741] env[68674]: } 
to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.086300] env[68674]: DEBUG oslo_vmware.api [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.454601] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "7329e503-d87d-4e15-b181-65ac6e376781" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.454954] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.455336] env[68674]: INFO nova.compute.manager [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Rebooting instance [ 813.520419] env[68674]: DEBUG nova.compute.manager [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 813.520870] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.521811] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43beded-75d7-41a7-905c-1b435ef45cdd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.530816] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.531163] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-501bdf54-e284-4c4d-824b-b151e3628032 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.541180] env[68674]: DEBUG oslo_vmware.api [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 813.541180] env[68674]: value = "task-3240078" [ 813.541180] env[68674]: _type = "Task" [ 813.541180] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.550509] env[68674]: DEBUG oslo_vmware.api [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240078, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.553439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.554116] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.556787] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.740s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.558414] env[68674]: INFO nova.compute.claims [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.590037] env[68674]: DEBUG oslo_vmware.api [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Task: {'id': task-3240077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14045} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.592725] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.592990] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.593217] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.593398] env[68674]: INFO nova.compute.manager [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 813.593650] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 813.594547] env[68674]: DEBUG nova.compute.manager [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 813.594653] env[68674]: DEBUG nova.network.neutron [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.614517] env[68674]: DEBUG nova.network.neutron [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.745121] env[68674]: DEBUG nova.network.neutron [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updated VIF entry in instance network info cache for port 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.745667] env[68674]: DEBUG nova.network.neutron [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [{"id": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "address": "fa:16:3e:55:82:ec", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a9b5c9d-f8", "ovs_interfaceid": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.981445] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.052796] env[68674]: DEBUG oslo_vmware.api [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240078, 'name': PowerOffVM_Task, 'duration_secs': 0.335157} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.053431] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.053431] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 814.054481] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb2b3a8c-cfb1-4813-918d-4b03b3854d49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.063325] env[68674]: DEBUG nova.compute.utils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.067248] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.067429] env[68674]: DEBUG nova.network.neutron [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.110409] env[68674]: DEBUG nova.policy [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5080a4f68ef1482caaee5aa26614e6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c958fcb56a934ef7919b76aa2a193429', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.117076] env[68674]: DEBUG nova.network.neutron [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.125727] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 814.126047] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None 
req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 814.126280] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleting the datastore file [datastore2] 02d4aee3-7267-4658-a277-8a9a00dd9f6e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.126586] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9bff4df-f048-4e15-82f6-fece1826d064 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.134371] env[68674]: DEBUG oslo_vmware.api [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 814.134371] env[68674]: value = "task-3240080" [ 814.134371] env[68674]: _type = "Task" [ 814.134371] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.143511] env[68674]: DEBUG oslo_vmware.api [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.249679] env[68674]: DEBUG oslo_concurrency.lockutils [req-bb70be62-3ad6-4e10-9243-719dbed8b8fc req-885ab881-8ad4-4dc2-b190-4cacae1b0f0d service nova] Releasing lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.250260] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquired lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.250636] env[68674]: DEBUG nova.network.neutron [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.351127] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 814.351127] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 814.351127] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 814.351506] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
814.354774] env[68674]: DEBUG nova.virt.hardware [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 814.355052] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e018bc10-d956-495f-9e41-f8ba627ac9d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.364665] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af491938-4517-4ef1-850a-71d42671b810 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.372236] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.373764] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43cb5ecf-3aa2-4f97-a34b-c642e8b9bee4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.391775] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:4b:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6b5f701-3612-4016-86eb-a45074df2088', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 814.399459] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.404271] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a147bd-bf8a-e67c-0343-b0f68d8aa40d/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 814.404271] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 814.404424] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 814.404424] env[68674]: value = "task-3240081" [ 814.404424] env[68674]: _type = "Task" [ 814.404424] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.405227] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88b43ce-2c1c-4a39-ad0c-4e9566f3784e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.407950] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef944b86-dae1-4dab-8d5d-dd62774f91d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.426064] env[68674]: DEBUG nova.network.neutron [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Successfully created port: 14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.434769] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a147bd-bf8a-e67c-0343-b0f68d8aa40d/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 814.434968] env[68674]: ERROR oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a147bd-bf8a-e67c-0343-b0f68d8aa40d/disk-0.vmdk due to incomplete transfer. [ 814.439616] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f18ce2ef-8c4d-496e-b82b-4d959466ed69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.441360] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.441360] env[68674]: value = "task-3240082" [ 814.441360] env[68674]: _type = "Task" [ 814.441360] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.441809] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 814.442051] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.442456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.442619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.442806] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.446800] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb708851-d3ba-4621-9df3-d2293ebc2cfd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.449812] env[68674]: DEBUG oslo_vmware.rw_handles [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a147bd-bf8a-e67c-0343-b0f68d8aa40d/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 814.450895] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Uploaded image bd367444-bce1-48c0-91d9-30bd5d973e39 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 814.453058] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 814.454219] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9fb584b4-6f98-47c0-b86c-b6831ed597ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.460132] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240082, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.462134] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.462393] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.463240] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d1ea264-0e17-46b7-a3cc-94c035f09cd7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.467488] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 814.467488] env[68674]: value = "task-3240083" [ 814.467488] env[68674]: _type = "Task" [ 814.467488] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.472848] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 814.472848] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52140f9f-b5ea-7331-3a2a-b50442a121a5" [ 814.472848] env[68674]: _type = "Task" [ 814.472848] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.480224] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240083, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.486318] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52140f9f-b5ea-7331-3a2a-b50442a121a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010883} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.487443] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-406410b7-5c21-450a-bb40-4e1c5fd6e549 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.493299] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 814.493299] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52eb2a82-34e3-222d-28aa-f5428fcdf044" [ 814.493299] env[68674]: _type = "Task" [ 814.493299] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.503697] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52eb2a82-34e3-222d-28aa-f5428fcdf044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.508480] env[68674]: DEBUG nova.compute.manager [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Received event network-changed-a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.508706] env[68674]: DEBUG nova.compute.manager [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Refreshing instance network info cache due to event network-changed-a6b5f701-3612-4016-86eb-a45074df2088. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 814.508939] env[68674]: DEBUG oslo_concurrency.lockutils [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] Acquiring lock "refresh_cache-5bd42044-84f5-4f48-aa97-b7cf990ed35d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.509092] env[68674]: DEBUG oslo_concurrency.lockutils [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] Acquired lock "refresh_cache-5bd42044-84f5-4f48-aa97-b7cf990ed35d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.509256] env[68674]: DEBUG nova.network.neutron [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Refreshing network info cache for port a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.568410] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.620292] env[68674]: INFO nova.compute.manager [-] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Took 1.03 seconds to deallocate network for instance. [ 814.644577] env[68674]: DEBUG oslo_vmware.api [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166504} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.647506] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.647702] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.647882] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.648156] env[68674]: INFO nova.compute.manager [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Took 1.13 seconds to destroy the instance on the hypervisor. 
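Note on the task-polling pattern above: the DeleteDatastoreFile_Task, PowerOffVM_Task and CreateVM_Task entries all follow the same oslo.vmware sequence — a vCenter task is started through the SOAP service ("Invoking FileManager.DeleteDatastoreFile_Task ..."), wait_for_task then polls it ("Task: {...} progress is 0%") until vCenter reports completion ("... completed successfully"). The sketch below is an illustrative reconstruction of that pattern using only the public oslo.vmware API, not the Nova code that produced these lines; the vCenter host, credentials, datastore path and datacenter reference are placeholders.

    # Illustrative sketch of the invoke-then-poll pattern seen in the log above.
    # Placeholders (not from the log): the vCenter endpoint/credentials, the
    # datastore path, and dc_ref.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder endpoint/credentials
        api_retry_count=10,                    # retry transient SOAP faults
        task_poll_interval=0.5)                # seconds between task polls

    vim = session.vim
    dc_ref = None  # placeholder: real callers pass the Datacenter managed object ref

    # Start the asynchronous vCenter task; this corresponds to the
    # "Invoking FileManager.DeleteDatastoreFile_Task" lines above.
    task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task',
        vim.service_content.fileManager,
        name='[datastore2] <instance-uuid>',   # placeholder datastore path
        datacenter=dc_ref)

    # Block until the task finishes; each poll produces a
    # "Task: {...} progress is N%" line, and completion produces the
    # "... completed successfully" line.
    session.wait_for_task(task)
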
[ 814.648419] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.648836] env[68674]: DEBUG nova.compute.manager [-] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.648924] env[68674]: DEBUG nova.network.neutron [-] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.951456] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240082, 'name': CreateVM_Task, 'duration_secs': 0.392796} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.953747] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.954470] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.954647] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.954987] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.957835] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80a5a812-a289-479e-b72f-c4866a88eb31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.962940] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 814.962940] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52523040-7452-a214-f70b-d9870404c286" [ 814.962940] env[68674]: _type = "Task" [ 814.962940] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.971588] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52523040-7452-a214-f70b-d9870404c286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.982230] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240083, 'name': Destroy_Task, 'duration_secs': 0.495166} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.982513] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Destroyed the VM [ 814.982763] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 814.983035] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d114b2b5-8b5f-4737-899b-1124c7af9a17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.994386] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 814.994386] env[68674]: value = "task-3240084" [ 814.994386] env[68674]: _type = "Task" [ 814.994386] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.006362] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52eb2a82-34e3-222d-28aa-f5428fcdf044, 'name': SearchDatastore_Task, 'duration_secs': 0.015769} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.009628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.010548] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. {{(pid=68674) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 815.010548] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240084, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.012971] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8131c4b8-6b57-418e-a177-05bd51b517e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.020856] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 815.020856] env[68674]: value = "task-3240085" [ 815.020856] env[68674]: _type = "Task" [ 815.020856] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.029334] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240085, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.129929] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.167682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dc41e2-e288-4b4d-aa21-3d52237518e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.177321] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb11ddd-5d1d-448c-ba56-149483930ae3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.215055] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0914e14-d8ae-41ff-a8ed-9a7aed525bac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.225444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c119324c-3f69-44e4-9640-2eac25e33aa7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.246764] env[68674]: DEBUG nova.compute.provider_tree [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 815.325507] env[68674]: DEBUG nova.network.neutron [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [{"id": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "address": "fa:16:3e:55:82:ec", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap0a9b5c9d-f8", "ovs_interfaceid": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.477136] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52523040-7452-a214-f70b-d9870404c286, 'name': SearchDatastore_Task, 'duration_secs': 0.009484} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.477721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.477780] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.478148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.478354] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.478572] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.478935] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-079c0c2a-bb20-4d81-af41-170b405d4c05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.482241] env[68674]: DEBUG nova.network.neutron [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Updated VIF entry in instance network info cache for port a6b5f701-3612-4016-86eb-a45074df2088. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 815.482582] env[68674]: DEBUG nova.network.neutron [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Updating instance_info_cache with network_info: [{"id": "a6b5f701-3612-4016-86eb-a45074df2088", "address": "fa:16:3e:3b:4b:ba", "network": {"id": "c49e5bae-b7f8-4568-9b4e-78ce6b3506e1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-2053822217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5555b26beb384c7680587cfdb67d9a10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6b5f701-36", "ovs_interfaceid": "a6b5f701-3612-4016-86eb-a45074df2088", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.493144] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.493420] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.494464] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9b72092-14fc-4879-99cd-f6599d961f2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.505536] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 815.505536] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277f27e-98c8-1f8b-5f6b-00881d7b8b9b" [ 815.505536] env[68674]: _type = "Task" [ 815.505536] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.516677] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240084, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.522953] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277f27e-98c8-1f8b-5f6b-00881d7b8b9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.531897] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240085, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.576018] env[68674]: DEBUG nova.compute.manager [req-2edbd185-7999-409e-b103-14dcea7db04d req-e550b64f-2011-42da-ac44-d4f59d376405 service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Received event network-vif-deleted-38369d37-449e-4f62-940b-9700d870d8c6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 815.576249] env[68674]: INFO nova.compute.manager [req-2edbd185-7999-409e-b103-14dcea7db04d req-e550b64f-2011-42da-ac44-d4f59d376405 service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Neutron deleted interface 38369d37-449e-4f62-940b-9700d870d8c6; detaching it from the instance and deleting it from the info cache [ 815.576455] env[68674]: DEBUG nova.network.neutron [req-2edbd185-7999-409e-b103-14dcea7db04d req-e550b64f-2011-42da-ac44-d4f59d376405 service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.586476] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 815.619810] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 815.620857] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.620857] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 815.620857] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.622051] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 815.622364] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 815.622695] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 815.623573] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 815.623573] env[68674]: DEBUG nova.virt.hardware [None 
req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 815.623573] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 815.623748] env[68674]: DEBUG nova.virt.hardware [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 815.624853] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c94cef-289f-4d89-9d56-9a3dd92c60cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.633546] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b067cebb-03ad-47cf-a9fd-cf604d668fa6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.664464] env[68674]: DEBUG nova.network.neutron [-] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.771667] env[68674]: ERROR nova.scheduler.client.report [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [req-f036ac64-fd0e-4186-a217-3dbf54b45c39] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f036ac64-fd0e-4186-a217-3dbf54b45c39"}]} [ 815.790056] env[68674]: DEBUG nova.scheduler.client.report [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 815.803481] env[68674]: DEBUG nova.scheduler.client.report [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 815.804177] env[68674]: DEBUG nova.compute.provider_tree [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 815.816725] env[68674]: DEBUG nova.scheduler.client.report [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 815.829722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Releasing lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.834313] env[68674]: DEBUG nova.scheduler.client.report [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 815.987824] env[68674]: DEBUG oslo_concurrency.lockutils [req-c00ca76c-116a-4f5d-9c89-1445af31e169 req-13b6667f-ca1e-4da7-ad46-b23b442adfc2 
service nova] Releasing lock "refresh_cache-5bd42044-84f5-4f48-aa97-b7cf990ed35d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.009721] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240084, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.021783] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277f27e-98c8-1f8b-5f6b-00881d7b8b9b, 'name': SearchDatastore_Task, 'duration_secs': 0.025398} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.022802] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7fa9724-bb30-4562-bc40-4e527b9144b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.032489] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 816.032489] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1d8fe-5ab6-a890-54c1-b98c123c3684" [ 816.032489] env[68674]: _type = "Task" [ 816.032489] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.035889] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240085, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527294} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.043299] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
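Editor's note on the placement exchange above: the 409 with code "placement.concurrent_update" on the inventory update for provider ade3f042-7427-494b-9654-0b65e074850c, followed immediately by an inventory refresh, shows Placement's generation-based concurrency control. Every inventory PUT must carry the provider generation the caller last saw; a stale generation is rejected, so the caller re-reads the provider and retries. The sketch below is only an illustration of that retry loop against the Placement HTTP API using requests; the endpoint, token, and payload values are assumptions, not values taken from this deployment, and it is not Nova's report client.

# Illustrative sketch (not nova.scheduler.client.report): retry an inventory
# update when Placement rejects it with a generation conflict (HTTP 409,
# code "placement.concurrent_update"). Endpoint and token are hypothetical.
import requests

PLACEMENT = "http://placement.example:8778"            # assumed endpoint
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",               # assumed credentials
           "OpenStack-API-Version": "placement 1.26"}   # assumed microversion

def set_inventories(rp_uuid, inventories, max_retries=3):
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_retries):
        # Re-read the provider's current generation before each attempt.
        current = requests.get(url, headers=HEADERS).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop to refresh the generation and try again.
    raise RuntimeError(f"inventory update for {rp_uuid} kept conflicting")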
[ 816.044637] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745a099b-230a-4ee7-a623-82f692d27884 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.075214] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.083142] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe67bd96-e89a-49a0-81a2-5a3f15f4d896 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.099328] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1d8fe-5ab6-a890-54c1-b98c123c3684, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.099328] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d179dad5-ffdb-4330-8da3-af12c5a27960 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.101543] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.101974] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5bd42044-84f5-4f48-aa97-b7cf990ed35d/5bd42044-84f5-4f48-aa97-b7cf990ed35d.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 816.105128] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51997380-d157-40a1-a170-c6e3acb21d9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.107412] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 816.107412] env[68674]: value = "task-3240086" [ 816.107412] env[68674]: _type = "Task" [ 816.107412] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.114490] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 816.114490] env[68674]: value = "task-3240087" [ 816.114490] env[68674]: _type = "Task" [ 816.114490] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.119670] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.120808] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.124017] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016af2aa-5ab8-48f2-9de1-0ef618a329b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.142485] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 816.148488] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.156433] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240087, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.174068] env[68674]: INFO nova.compute.manager [-] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Took 1.52 seconds to deallocate network for instance. [ 816.177348] env[68674]: DEBUG nova.compute.manager [req-2edbd185-7999-409e-b103-14dcea7db04d req-e550b64f-2011-42da-ac44-d4f59d376405 service nova] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Detach interface failed, port_id=38369d37-449e-4f62-940b-9700d870d8c6, reason: Instance 02d4aee3-7267-4658-a277-8a9a00dd9f6e could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 816.308000] env[68674]: DEBUG nova.network.neutron [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Successfully updated port: 14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.338421] env[68674]: DEBUG nova.compute.manager [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 816.339011] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bf0061-ab47-4bc5-8b7d-a4a09ec7f15e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.474687] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d52a6e-14f8-4c61-ad13-ae62b4675c6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.487971] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c651325-6df5-40a1-a7d0-43a898c0e32c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.527070] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2e6a61-3df9-43b8-b9d2-8eac00b149e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.540233] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffddab01-e3fd-4a67-82af-1b69b1ea151f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.545489] env[68674]: DEBUG oslo_vmware.api [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240084, 'name': RemoveSnapshot_Task, 'duration_secs': 1.037738} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.546948] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 816.547237] env[68674]: INFO nova.compute.manager [None req-5fe8fc85-482e-4702-ac38-c540e93a5e5f tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Took 15.43 seconds to snapshot the instance on the hypervisor. 
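Editor's note on the task entries in this stretch: the same shape repeats throughout, a vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, ReconfigVM_Task, RemoveSnapshot_Task, ...) is started, polled while it reports a progress percentage, and finally logged as completed with its duration_secs. The snippet below is a generic poll-until-done loop written for illustration only; it is not oslo.vmware's wait_for_task, and get_task_info plus the state names are assumed stand-ins for whatever client API is actually in use.

# Generic illustration of the poll-until-done pattern visible in the log.
# get_task_info() and the state strings are hypothetical placeholders.
import time

class TaskTimeout(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()   # e.g. {"state": "running", "progress": 89}
        state = info["state"]
        if state == "success":
            return info          # caller reads the result / duration here
        if state == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # Still queued or running: report progress and poll again.
        print(f"task progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TaskTimeout("task did not complete before the timeout")

In the log this loop shape appears as the repeated "_poll_task ... progress is N%" lines followed by a single "completed successfully" entry carrying duration_secs.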
[ 816.550607] env[68674]: DEBUG nova.compute.manager [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Received event network-vif-plugged-14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.550825] env[68674]: DEBUG oslo_concurrency.lockutils [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] Acquiring lock "a4cb1632-eada-4b10-a66f-64fecf45fd76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.551051] env[68674]: DEBUG oslo_concurrency.lockutils [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.551226] env[68674]: DEBUG oslo_concurrency.lockutils [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.551400] env[68674]: DEBUG nova.compute.manager [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] No waiting events found dispatching network-vif-plugged-14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.551554] env[68674]: WARNING nova.compute.manager [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Received unexpected event network-vif-plugged-14aed01d-1948-4a24-8075-fa23078ec7a6 for instance with vm_state building and task_state spawning. [ 816.551713] env[68674]: DEBUG nova.compute.manager [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Received event network-changed-14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.551914] env[68674]: DEBUG nova.compute.manager [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Refreshing instance network info cache due to event network-changed-14aed01d-1948-4a24-8075-fa23078ec7a6. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 816.552140] env[68674]: DEBUG oslo_concurrency.lockutils [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] Acquiring lock "refresh_cache-a4cb1632-eada-4b10-a66f-64fecf45fd76" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.552290] env[68674]: DEBUG oslo_concurrency.lockutils [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] Acquired lock "refresh_cache-a4cb1632-eada-4b10-a66f-64fecf45fd76" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.552485] env[68674]: DEBUG nova.network.neutron [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Refreshing network info cache for port 14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.564627] env[68674]: DEBUG nova.compute.provider_tree [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.616799] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240086, 'name': ReconfigVM_Task, 'duration_secs': 0.458006} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.617346] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Reconfigured VM instance instance-00000035 to attach disk [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.617943] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c4bd90-5b08-492d-bbe7-80d45b1ec3da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.628796] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240087, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474935} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.644917] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5bd42044-84f5-4f48-aa97-b7cf990ed35d/5bd42044-84f5-4f48-aa97-b7cf990ed35d.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.645170] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.650405] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cec9a3e8-cf1f-4c60-8fd8-90692caa6d0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.654971] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0ad2e0e-83db-4e64-b3e6-74c59890094d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.671184] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 816.671184] env[68674]: value = "task-3240088" [ 816.671184] env[68674]: _type = "Task" [ 816.671184] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.675608] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 816.675608] env[68674]: value = "task-3240089" [ 816.675608] env[68674]: _type = "Task" [ 816.675608] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.676593] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.684575] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.684842] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240088, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.690734] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240089, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.811999] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-a4cb1632-eada-4b10-a66f-64fecf45fd76" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.069069] env[68674]: DEBUG nova.scheduler.client.report [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.089898] env[68674]: DEBUG nova.network.neutron [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.183810] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240088, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065771} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.184524] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.185353] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debf5d53-febe-4708-b05b-7ccecb29deab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.191320] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240089, 'name': ReconfigVM_Task, 'duration_secs': 0.177845} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.191630] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.191877] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-164dd33d-4733-41f7-9643-8907baab7db1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.211640] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 5bd42044-84f5-4f48-aa97-b7cf990ed35d/5bd42044-84f5-4f48-aa97-b7cf990ed35d.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.212751] env[68674]: DEBUG nova.network.neutron [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.214488] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ba2abe9-6912-48c6-adf3-4d2f7e656174 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.230045] env[68674]: DEBUG oslo_concurrency.lockutils [req-cafbe884-997e-4a2a-8d99-432d5a403d88 req-7b917e04-7f42-40f8-a6b3-77a30b93415c service nova] Releasing lock "refresh_cache-a4cb1632-eada-4b10-a66f-64fecf45fd76" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.231613] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-a4cb1632-eada-4b10-a66f-64fecf45fd76" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.231765] env[68674]: DEBUG nova.network.neutron [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.233310] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 817.233310] env[68674]: value = "task-3240090" [ 817.233310] env[68674]: _type = "Task" [ 817.233310] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.240020] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 817.240020] env[68674]: value = "task-3240091" [ 817.240020] env[68674]: _type = "Task" [ 817.240020] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.248179] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240090, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.254494] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240091, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.306268] env[68674]: DEBUG nova.compute.manager [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 817.358911] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bd0c89-c8f0-4c04-a46c-206fbf606846 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.366314] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Doing hard reboot of VM {{(pid=68674) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 817.367024] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-aa37db2f-9754-4ae6-b8d2-1b1e8ec27ec6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.372785] env[68674]: DEBUG oslo_vmware.api [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 817.372785] env[68674]: value = "task-3240092" [ 817.372785] env[68674]: _type = "Task" [ 817.372785] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.381292] env[68674]: DEBUG oslo_vmware.api [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240092, 'name': ResetVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.576430] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.020s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.577012] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 817.579637] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.380s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.579906] env[68674]: DEBUG nova.objects.instance [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lazy-loading 'resources' on Instance uuid 714142ec-89ad-44ab-8543-11493172a50b {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.749026] env[68674]: DEBUG oslo_vmware.api [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240090, 'name': PowerOnVM_Task, 'duration_secs': 0.41526} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.749989] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 817.754461] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240091, 'name': ReconfigVM_Task, 'duration_secs': 0.354349} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.755472] env[68674]: DEBUG nova.compute.manager [None req-d3b56319-c366-4b1e-9ba9-bda6be71403f tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 817.755786] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 5bd42044-84f5-4f48-aa97-b7cf990ed35d/5bd42044-84f5-4f48-aa97-b7cf990ed35d.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.756864] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1b1a61-b5b3-4eb2-985c-d7fd80f7ff47 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.759386] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-136aef08-b123-41c6-8e2a-ad4003283297 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.767489] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 817.767489] env[68674]: value = "task-3240093" [ 817.767489] env[68674]: _type = "Task" [ 817.767489] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.779454] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240093, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.780361] env[68674]: DEBUG nova.network.neutron [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.825289] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.886593] env[68674]: DEBUG oslo_vmware.api [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240092, 'name': ResetVM_Task, 'duration_secs': 0.111652} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.886866] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Did hard reboot of VM {{(pid=68674) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 817.887067] env[68674]: DEBUG nova.compute.manager [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 817.887848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb12c628-03be-4843-b7b6-94b1ffd6b8c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.971811] env[68674]: DEBUG nova.network.neutron [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Updating instance_info_cache with network_info: [{"id": "14aed01d-1948-4a24-8075-fa23078ec7a6", "address": "fa:16:3e:8a:45:1b", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14aed01d-19", "ovs_interfaceid": "14aed01d-1948-4a24-8075-fa23078ec7a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.083180] env[68674]: DEBUG nova.compute.utils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 818.085155] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 818.085349] env[68674]: DEBUG nova.network.neutron [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 818.139949] env[68674]: DEBUG nova.policy [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 818.288785] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240093, 'name': Rename_Task, 'duration_secs': 0.271835} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.289616] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.289922] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09446c42-cf33-4fb3-8ba4-8773b5109450 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.298038] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 818.298038] env[68674]: value = "task-3240094" [ 818.298038] env[68674]: _type = "Task" [ 818.298038] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.307633] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240094, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.401936] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2077664a-dd87-4a0f-b6f0-24029664104e tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.946s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.437721] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5ace5-6b18-d614-9eda-06f8471e5b81/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 818.439650] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd935c3-70fa-42e9-a220-298edd6390ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.447082] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5ace5-6b18-d614-9eda-06f8471e5b81/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 818.447611] env[68674]: ERROR oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5ace5-6b18-d614-9eda-06f8471e5b81/disk-0.vmdk due to incomplete transfer. [ 818.448095] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-73d97be6-df4e-415f-b2a5-a8b4ef91a375 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.460291] env[68674]: DEBUG oslo_vmware.rw_handles [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b5ace5-6b18-d614-9eda-06f8471e5b81/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 818.460587] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Uploaded image 496b987e-faad-4d33-9ebe-e3f02bf8e7d6 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 818.464647] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 818.464647] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a2045285-9c9d-41eb-b85f-b5ae9daa6ce9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.474964] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-a4cb1632-eada-4b10-a66f-64fecf45fd76" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.475412] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance network_info: |[{"id": "14aed01d-1948-4a24-8075-fa23078ec7a6", "address": "fa:16:3e:8a:45:1b", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14aed01d-19", "ovs_interfaceid": "14aed01d-1948-4a24-8075-fa23078ec7a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.477604] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 818.477604] env[68674]: value = "task-3240095" [ 818.477604] env[68674]: _type = "Task" [ 818.477604] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.477604] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:45:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14aed01d-1948-4a24-8075-fa23078ec7a6', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.487120] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.490269] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.494621] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea50d5e8-82a4-4d0b-8212-c59de1467bf9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.521423] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240095, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.522767] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.522767] env[68674]: value = "task-3240096" [ 818.522767] env[68674]: _type = "Task" [ 818.522767] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.530670] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240096, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.533914] env[68674]: DEBUG nova.network.neutron [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Successfully created port: 7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.592819] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 818.620746] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f294c020-c144-4aac-83f3-05ec1306b1f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.630216] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5282dc-db34-4137-a66d-e2274bf32ed1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.679582] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf6d27f-235d-4a01-8a4c-d3a423ac47c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.690241] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fff4330-3f70-405b-8abd-aa74a031aaa6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.711830] env[68674]: DEBUG nova.compute.provider_tree [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.810152] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240094, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.000849] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240095, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.033123] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240096, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.216035] env[68674]: DEBUG nova.scheduler.client.report [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.223274] env[68674]: DEBUG nova.compute.manager [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Received event network-changed-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 819.224366] env[68674]: DEBUG nova.compute.manager [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Refreshing instance network info cache due to event network-changed-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 819.224366] env[68674]: DEBUG oslo_concurrency.lockutils [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] Acquiring lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.224366] env[68674]: DEBUG oslo_concurrency.lockutils [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] Acquired lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.224366] env[68674]: DEBUG nova.network.neutron [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Refreshing network info cache for port 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 819.310340] env[68674]: DEBUG oslo_vmware.api [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240094, 'name': PowerOnVM_Task, 'duration_secs': 0.954035} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.310486] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.310852] env[68674]: INFO nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Took 8.69 seconds to spawn the instance on the hypervisor. [ 819.310852] env[68674]: DEBUG nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.311559] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e1154c-4592-4df5-adde-4ed6ff214c1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.498248] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240095, 'name': Destroy_Task, 'duration_secs': 0.737847} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.498522] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroyed the VM [ 819.498758] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 819.499018] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-86fd27d9-f5df-4646-89e7-fa22934e117c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.506953] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 819.506953] env[68674]: value = "task-3240097" [ 819.506953] env[68674]: _type = "Task" [ 819.506953] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.517866] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240097, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.532705] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240096, 'name': CreateVM_Task, 'duration_secs': 0.815158} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.532831] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.533622] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.534193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.534193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 819.535035] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16f2b50d-3e3d-4c70-93ab-ad8c0d88d5e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.539355] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 819.539355] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8dbf0-79a2-835e-2c33-040b10d004c3" [ 819.539355] env[68674]: _type = "Task" [ 819.539355] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.547528] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8dbf0-79a2-835e-2c33-040b10d004c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.601664] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 819.629096] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 819.629354] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.629514] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 819.629704] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.629858] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 819.629987] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 819.630219] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 819.630370] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 819.631643] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] 
Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 819.631643] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 819.631643] env[68674]: DEBUG nova.virt.hardware [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 819.631785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2b985d-e145-40b5-a601-716496f0c185 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.641140] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ee2510-afcf-4cf2-b5e8-e6531d91601c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.721505] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.724496] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.386s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.724750] env[68674]: DEBUG nova.objects.instance [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lazy-loading 'resources' on Instance uuid 627fb348-1749-4480-97b9-b479a182d4ee {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.758980] env[68674]: INFO nova.scheduler.client.report [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Deleted allocations for instance 714142ec-89ad-44ab-8543-11493172a50b [ 819.830125] env[68674]: INFO nova.compute.manager [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Took 56.19 seconds to build instance. 
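The repeated wait_for_task / _poll_task entries in this stretch of the log (Destroy_Task, CreateVM_Task, RemoveSnapshot_Task, CopyVirtualDisk_Task) all record the same submit-then-poll pattern: the vSphere API call returns a task reference, the caller polls it on a fixed interval, logs "progress is N%" on each cycle, and logs "completed successfully" together with a duration_secs once the task finishes. The short Python sketch below illustrates that polling loop only; FakeTask, its poll() method, and the 0.5-second interval are invented for the example and are not the actual oslo.vmware or Nova implementation.

    import time

    class FakeTask:
        """Hypothetical stand-in for a vSphere task reference (not the real
        oslo.vmware object). It simply advances toward completion on each
        poll, the way the log's progress percentages climb."""

        def __init__(self, steps):
            self._steps = steps
            self._done_steps = 0

        def poll(self):
            """Return (progress_percent, finished)."""
            self._done_steps = min(self._done_steps + 1, self._steps)
            progress = int(100 * self._done_steps / self._steps)
            return progress, self._done_steps >= self._steps

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task finishes, mirroring the wait_for_task /
        _poll_task lines above: report progress on every cycle, then report
        the elapsed time as duration_secs once the task completes."""
        start = time.monotonic()
        while True:
            progress, finished = task.poll()
            print(f"Task progress is {progress}%.")
            if finished:
                elapsed = time.monotonic() - start
                print(f"Task completed successfully. duration_secs={elapsed:.6f}")
                return
            time.sleep(poll_interval)

    if __name__ == "__main__":
        # Example run: prints 33%, 66%, 100%, then the completion line,
        # roughly matching the cadence of the task-poll entries in the log.
        wait_for_task(FakeTask(steps=3))
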
[ 819.940396] env[68674]: INFO nova.compute.manager [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Unrescuing [ 819.940719] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.940961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.941220] env[68674]: DEBUG nova.network.neutron [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.987157] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "7329e503-d87d-4e15-b181-65ac6e376781" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.987672] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.987815] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "7329e503-d87d-4e15-b181-65ac6e376781-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.987993] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.988207] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.993906] env[68674]: INFO nova.compute.manager [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Terminating instance [ 820.019611] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240097, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.050653] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8dbf0-79a2-835e-2c33-040b10d004c3, 'name': SearchDatastore_Task, 'duration_secs': 0.014645} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.051929] env[68674]: DEBUG nova.network.neutron [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updated VIF entry in instance network info cache for port 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 820.052016] env[68674]: DEBUG nova.network.neutron [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [{"id": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "address": "fa:16:3e:55:82:ec", "network": {"id": "69e2ef02-944e-40b2-88f2-3a00c754a5eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-125455610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435fbf1f847d4d36ba126fc8c49b59fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3e0aae3-33d1-403b-bfaf-306f77a1422e", "external-id": "nsx-vlan-transportzone-211", "segmentation_id": 211, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a9b5c9d-f8", "ovs_interfaceid": "0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.054090] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.054362] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.054619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.054781] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.054965] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.055435] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2698965-3241-4afc-8885-c24cf5a70d9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.066831] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.066831] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.066953] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94a35db8-db4b-4fd4-9952-1752e03e88fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.074437] env[68674]: DEBUG nova.compute.manager [req-69425ec5-3053-4dbe-85c4-e52c0a5cddda req-45efafb7-ca92-4aff-9f02-eff6396924d5 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Received event network-vif-plugged-7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.074711] env[68674]: DEBUG oslo_concurrency.lockutils [req-69425ec5-3053-4dbe-85c4-e52c0a5cddda req-45efafb7-ca92-4aff-9f02-eff6396924d5 service nova] Acquiring lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.074973] env[68674]: DEBUG oslo_concurrency.lockutils [req-69425ec5-3053-4dbe-85c4-e52c0a5cddda req-45efafb7-ca92-4aff-9f02-eff6396924d5 service nova] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.075314] env[68674]: DEBUG oslo_concurrency.lockutils [req-69425ec5-3053-4dbe-85c4-e52c0a5cddda req-45efafb7-ca92-4aff-9f02-eff6396924d5 service nova] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.075362] env[68674]: DEBUG nova.compute.manager [req-69425ec5-3053-4dbe-85c4-e52c0a5cddda req-45efafb7-ca92-4aff-9f02-eff6396924d5 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] No waiting events found dispatching network-vif-plugged-7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.075535] env[68674]: WARNING nova.compute.manager [req-69425ec5-3053-4dbe-85c4-e52c0a5cddda req-45efafb7-ca92-4aff-9f02-eff6396924d5 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Received unexpected event network-vif-plugged-7188e58a-318a-4695-b262-797d48d71c63 for instance with vm_state building and task_state spawning. [ 820.077613] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 820.077613] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fe0396-398b-601b-aff1-14e39900b537" [ 820.077613] env[68674]: _type = "Task" [ 820.077613] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.088472] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fe0396-398b-601b-aff1-14e39900b537, 'name': SearchDatastore_Task, 'duration_secs': 0.011386} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.088771] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8df21fc-28b5-4ca3-ad9e-89e0d543ac03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.095135] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 820.095135] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52637d2e-563c-c676-d38e-1aae8338a680" [ 820.095135] env[68674]: _type = "Task" [ 820.095135] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.104114] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52637d2e-563c-c676-d38e-1aae8338a680, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.199224] env[68674]: DEBUG nova.network.neutron [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Successfully updated port: 7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 820.266559] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e86f83c0-80d1-438c-9fc0-9827393eb5f7 tempest-ServerAddressesNegativeTestJSON-1292321572 tempest-ServerAddressesNegativeTestJSON-1292321572-project-member] Lock "714142ec-89ad-44ab-8543-11493172a50b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.709s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.333232] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2bfb7100-8d96-4fad-bbed-285cd52beed1 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.792s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.498837] env[68674]: DEBUG nova.compute.manager [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 820.499088] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.500037] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9495aa-8225-4edd-9eaa-4107053f78d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.510017] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 820.515273] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffdcfca3-5db6-4bb7-9fd9-87f99a22106a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.521767] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240097, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.527786] env[68674]: DEBUG oslo_vmware.api [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 820.527786] env[68674]: value = "task-3240098" [ 820.527786] env[68674]: _type = "Task" [ 820.527786] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.539172] env[68674]: DEBUG oslo_vmware.api [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.554425] env[68674]: DEBUG oslo_concurrency.lockutils [req-4380a76e-62b0-464f-b92a-c310cb9231e2 req-497911fa-e74f-4a8a-a57f-1f65015aae43 service nova] Releasing lock "refresh_cache-7329e503-d87d-4e15-b181-65ac6e376781" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.606046] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52637d2e-563c-c676-d38e-1aae8338a680, 'name': SearchDatastore_Task, 'duration_secs': 0.016911} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.613097] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.613539] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.616537] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25b62b00-83c5-4604-b747-d7bfac3c2314 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.621728] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 820.621728] env[68674]: value = "task-3240099" [ 820.621728] env[68674]: _type = "Task" [ 820.621728] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.636093] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240099, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.701581] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "refresh_cache-e371ae6b-44fd-47ce-9c58-8981e7da5cbc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.703559] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-e371ae6b-44fd-47ce-9c58-8981e7da5cbc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.703559] env[68674]: DEBUG nova.network.neutron [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.728287] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6b92ca-4052-471b-aff5-b1f5b7f179bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.735808] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12189f2b-34c0-4ed2-a6a3-293363a4500a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.745277] env[68674]: DEBUG nova.network.neutron [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Updating instance_info_cache with network_info: [{"id": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "address": "fa:16:3e:21:74:75", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb762aca-5f", "ovs_interfaceid": "eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.777401] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a45eea9-a54a-462b-a34c-71bac6cff53b {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.786409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06726be1-46be-4730-9a9e-3bd9b1ebab1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.800144] env[68674]: DEBUG nova.compute.provider_tree [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.019401] env[68674]: DEBUG oslo_vmware.api [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240097, 'name': RemoveSnapshot_Task, 'duration_secs': 1.018944} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.020611] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 821.020611] env[68674]: INFO nova.compute.manager [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 16.34 seconds to snapshot the instance on the hypervisor. [ 821.039309] env[68674]: DEBUG oslo_vmware.api [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240098, 'name': PowerOffVM_Task, 'duration_secs': 0.190087} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.039573] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 821.039742] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 821.040014] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36dbd1fb-dbe4-4d49-84dd-8e6b35078419 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.121664] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 821.122177] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 821.122312] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Deleting the datastore file [datastore1] 7329e503-d87d-4e15-b181-65ac6e376781 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.122651] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-949322a8-53dc-49a0-9ca3-4022db277090 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.139667] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240099, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.141135] env[68674]: DEBUG oslo_vmware.api [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 821.141135] env[68674]: value = "task-3240101" [ 821.141135] env[68674]: _type = "Task" [ 821.141135] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.149785] env[68674]: DEBUG oslo_vmware.api [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240101, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.248329] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-f029042f-d80b-453e-adc9-1e65d7da7aaf" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.249844] env[68674]: DEBUG nova.objects.instance [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'flavor' on Instance uuid f029042f-d80b-453e-adc9-1e65d7da7aaf {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.265873] env[68674]: DEBUG nova.network.neutron [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.304592] env[68674]: DEBUG nova.scheduler.client.report [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.561438] env[68674]: DEBUG nova.network.neutron [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Updating instance_info_cache with network_info: [{"id": "7188e58a-318a-4695-b262-797d48d71c63", "address": "fa:16:3e:5f:03:ed", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap7188e58a-31", "ovs_interfaceid": "7188e58a-318a-4695-b262-797d48d71c63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.573202] env[68674]: DEBUG nova.compute.manager [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Found 3 images (rotation: 2) {{(pid=68674) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 821.573502] env[68674]: DEBUG nova.compute.manager [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Rotating out 1 backups {{(pid=68674) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 821.574376] env[68674]: DEBUG nova.compute.manager [None req-0f3731fb-f7cb-4cd1-a33b-1bbc21527d43 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleting image cfe66e69-342f-49f5-a686-4971a7ab072c {{(pid=68674) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 821.635776] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240099, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.987217} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.636353] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 821.636618] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.636964] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dec1f31-87d9-48a3-8452-168664df1f83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.643048] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 821.643048] env[68674]: value = "task-3240102" [ 821.643048] env[68674]: _type = "Task" [ 821.643048] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.663060] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240102, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.663342] env[68674]: DEBUG oslo_vmware.api [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.488323} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.663573] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.663754] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 821.663925] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.664132] env[68674]: INFO nova.compute.manager [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Took 1.17 seconds to destroy the instance on the hypervisor. [ 821.664375] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.664573] env[68674]: DEBUG nova.compute.manager [-] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 821.664676] env[68674]: DEBUG nova.network.neutron [-] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.756882] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29ae1f8-5ea3-41f5-8f90-34f4c8f4151c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.782537] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 821.785066] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-920669c2-4324-4135-8b0d-6fe46525a21a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.791239] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 821.791239] env[68674]: value = "task-3240103" [ 821.791239] env[68674]: _type = "Task" [ 821.791239] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.799043] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240103, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.810363] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.086s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.814049] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.704s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.814049] env[68674]: DEBUG nova.objects.instance [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lazy-loading 'resources' on Instance uuid 503e9328-bbd8-414f-8bea-250ed8247d67 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.839737] env[68674]: INFO nova.scheduler.client.report [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Deleted allocations for instance 627fb348-1749-4480-97b9-b479a182d4ee [ 822.066016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-e371ae6b-44fd-47ce-9c58-8981e7da5cbc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.066016] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Instance network_info: |[{"id": "7188e58a-318a-4695-b262-797d48d71c63", "address": "fa:16:3e:5f:03:ed", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7188e58a-31", "ovs_interfaceid": "7188e58a-318a-4695-b262-797d48d71c63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 822.066016] env[68674]: DEBUG 
nova.virt.vmwareapi.vmops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:03:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7188e58a-318a-4695-b262-797d48d71c63', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 822.073675] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating folder: Project (0cee54e456084086866d08b098a24b64). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 822.074181] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-232dbc76-c0c0-4f5a-a8ea-b9767e97c508 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.089247] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created folder: Project (0cee54e456084086866d08b098a24b64) in parent group-v647377. [ 822.089247] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating folder: Instances. Parent ref: group-v647562. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 822.089247] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6141f8d-79a1-4658-b173-d310db30573c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.097449] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created folder: Instances in parent group-v647562. [ 822.097692] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 822.099031] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 822.099031] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21ef5d90-008d-42d4-930a-96458b768767 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.122855] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 822.122855] env[68674]: value = "task-3240106" [ 822.122855] env[68674]: _type = "Task" [ 822.122855] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.130845] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240106, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.153696] env[68674]: DEBUG nova.compute.manager [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Received event network-changed-7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.154388] env[68674]: DEBUG nova.compute.manager [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Refreshing instance network info cache due to event network-changed-7188e58a-318a-4695-b262-797d48d71c63. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 822.154388] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] Acquiring lock "refresh_cache-e371ae6b-44fd-47ce-9c58-8981e7da5cbc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.154388] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] Acquired lock "refresh_cache-e371ae6b-44fd-47ce-9c58-8981e7da5cbc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.155604] env[68674]: DEBUG nova.network.neutron [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Refreshing network info cache for port 7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.162538] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066875} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.162538] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.163526] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7c0ba5-0f4d-4eee-bdbb-165365f1bf70 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.190143] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.191302] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2275607e-96d4-4e57-bb5b-0583520c3d21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.215704] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 822.215704] env[68674]: value = "task-3240107" [ 822.215704] env[68674]: _type = "Task" [ 822.215704] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.225808] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240107, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.301764] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240103, 'name': PowerOffVM_Task, 'duration_secs': 0.469853} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.302156] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 822.307827] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Reconfiguring VM instance instance-00000035 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 822.308416] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7e0558b-e926-4fcd-bb00-c88e8be3231b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.331971] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 822.331971] env[68674]: value = "task-3240108" [ 822.331971] env[68674]: _type = "Task" [ 822.331971] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.342843] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240108, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.352134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-49ceb180-af1c-4662-ba90-7ea1f04b69ec tempest-ServersAdminNegativeTestJSON-1933407318 tempest-ServersAdminNegativeTestJSON-1933407318-project-member] Lock "627fb348-1749-4480-97b9-b479a182d4ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 52.759s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.522951] env[68674]: DEBUG nova.network.neutron [-] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.637125] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240106, 'name': CreateVM_Task, 'duration_secs': 0.365744} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.640858] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.642137] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.642505] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.642850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.643159] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cd63782-8f97-4f6b-9c52-689cbfcd87ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.648032] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 822.648032] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521b9e59-ea2a-b84d-07fa-a1e3a51a22a3" [ 822.648032] env[68674]: _type = "Task" [ 822.648032] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.663037] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521b9e59-ea2a-b84d-07fa-a1e3a51a22a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.725573] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240107, 'name': ReconfigVM_Task, 'duration_secs': 0.320835} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.725868] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Reconfigured VM instance instance-0000003d to attach disk [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.728911] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3693f77c-1972-4685-80db-3c4924db4e08 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.735578] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 822.735578] env[68674]: value = "task-3240109" [ 822.735578] env[68674]: _type = "Task" [ 822.735578] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.743719] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240109, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.840374] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240108, 'name': ReconfigVM_Task, 'duration_secs': 0.339875} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.843325] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Reconfigured VM instance instance-00000035 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 822.843539] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.844030] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4bc6588-d4d9-4667-940d-9e0ba07801ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.850441] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1acfbb-439a-4049-a7ca-b2bdc6e13e2c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.854558] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 822.854558] env[68674]: value = "task-3240110" [ 822.854558] env[68674]: _type = "Task" [ 822.854558] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.863044] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e9b23f-269c-4965-8581-60f60c2bfc85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.870305] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240110, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.900769] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.901060] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.901280] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.901465] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.901634] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.906515] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343e8f15-4c18-468b-bdf5-e2a1bda5e5cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.909517] env[68674]: INFO nova.compute.manager [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Terminating instance [ 822.916051] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ba047a-92f4-494b-8d7c-6575fae30c21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.933037] env[68674]: DEBUG nova.compute.provider_tree [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c 
{{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.943546] env[68674]: DEBUG nova.network.neutron [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Updated VIF entry in instance network info cache for port 7188e58a-318a-4695-b262-797d48d71c63. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.943921] env[68674]: DEBUG nova.network.neutron [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Updating instance_info_cache with network_info: [{"id": "7188e58a-318a-4695-b262-797d48d71c63", "address": "fa:16:3e:5f:03:ed", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7188e58a-31", "ovs_interfaceid": "7188e58a-318a-4695-b262-797d48d71c63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.026058] env[68674]: INFO nova.compute.manager [-] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Took 1.36 seconds to deallocate network for instance. [ 823.158906] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521b9e59-ea2a-b84d-07fa-a1e3a51a22a3, 'name': SearchDatastore_Task, 'duration_secs': 0.025798} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.159275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.159526] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.159771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.159922] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.160114] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.160376] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1870e6a-146f-47f0-b9bf-5a1038a24225 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.174958] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.175161] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 823.175987] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-869f1086-658f-4752-b4ba-61b0486a6cac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.181046] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 823.181046] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289c629-3983-33ed-58a1-95c7da241f66" [ 823.181046] env[68674]: _type = "Task" [ 823.181046] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.189344] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289c629-3983-33ed-58a1-95c7da241f66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.247544] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240109, 'name': Rename_Task, 'duration_secs': 0.143239} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.248142] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.248641] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e3926d0-bff9-4822-9438-0d3c760fad7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.255597] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 823.255597] env[68674]: value = "task-3240111" [ 823.255597] env[68674]: _type = "Task" [ 823.255597] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.263864] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.366295] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240110, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.414579] env[68674]: DEBUG nova.compute.manager [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.414914] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.416237] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4417344-968a-46a3-88c2-2d9159ce668d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.426423] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.426805] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f4fd011-c68d-4fef-ad20-e1c557f0bdb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.434663] env[68674]: DEBUG oslo_vmware.api [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 823.434663] env[68674]: value = "task-3240112" [ 823.434663] env[68674]: _type = "Task" [ 823.434663] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.440526] env[68674]: DEBUG nova.scheduler.client.report [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.446251] env[68674]: DEBUG oslo_concurrency.lockutils [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] Releasing lock "refresh_cache-e371ae6b-44fd-47ce-9c58-8981e7da5cbc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.446647] env[68674]: DEBUG nova.compute.manager [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Received event network-vif-deleted-0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.446958] env[68674]: INFO nova.compute.manager [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Neutron deleted interface 0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4; detaching it from the instance and deleting it from the info cache [ 823.447263] env[68674]: DEBUG nova.network.neutron [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.457884] env[68674]: DEBUG oslo_vmware.api [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240112, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.535715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.692643] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289c629-3983-33ed-58a1-95c7da241f66, 'name': SearchDatastore_Task, 'duration_secs': 0.044444} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.693667] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b00b00c-76cd-4930-8c1b-31c383110f6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.699718] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 823.699718] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52516aa3-ea45-c39b-1d21-9519e2abc670" [ 823.699718] env[68674]: _type = "Task" [ 823.699718] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.708626] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52516aa3-ea45-c39b-1d21-9519e2abc670, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.769297] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240111, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.866531] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240110, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.945306] env[68674]: DEBUG oslo_vmware.api [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240112, 'name': PowerOffVM_Task, 'duration_secs': 0.393319} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.945306] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.945306] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.945569] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e14019c3-2637-47a0-8d60-3491788e8ac2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.950371] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.952777] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.280s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.953021] env[68674]: DEBUG nova.objects.instance [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lazy-loading 'resources' on Instance uuid 55727bbc-6b65-4e4c-ba4f-8240efbf052a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.954656] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a49b6fc-87dd-4773-9206-f013f66735e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.965044] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1812d7ae-741b-4fae-99ea-9c4803a162a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.976602] env[68674]: INFO nova.scheduler.client.report [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Deleted allocations for instance 503e9328-bbd8-414f-8bea-250ed8247d67 [ 824.009421] env[68674]: DEBUG nova.compute.manager [req-9e751272-e23a-4b94-ae1c-0ab646d1afdc req-e22dbc2c-4022-4a8d-873f-2c302b417540 service nova] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Detach interface failed, port_id=0a9b5c9d-f85d-42f8-9e17-5a63a48e7ea4, reason: Instance 7329e503-d87d-4e15-b181-65ac6e376781 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 824.016924] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.017115] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.018159] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Deleting the datastore file [datastore2] 5bd42044-84f5-4f48-aa97-b7cf990ed35d {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.018545] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62421637-c023-41cc-bb07-8ccff66c6b0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.026737] env[68674]: DEBUG oslo_vmware.api [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 824.026737] env[68674]: value = "task-3240114" [ 824.026737] env[68674]: _type = "Task" [ 824.026737] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.040376] env[68674]: DEBUG oslo_vmware.api [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.211552] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52516aa3-ea45-c39b-1d21-9519e2abc670, 'name': SearchDatastore_Task, 'duration_secs': 0.011758} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.212085] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.212214] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e371ae6b-44fd-47ce-9c58-8981e7da5cbc/e371ae6b-44fd-47ce-9c58-8981e7da5cbc.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 824.212516] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e958aca7-95e7-44b3-89a5-c936d7bad245 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.220735] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 824.220735] env[68674]: value = "task-3240115" [ 824.220735] env[68674]: _type = "Task" [ 824.220735] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.230033] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.273944] env[68674]: DEBUG oslo_vmware.api [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240111, 'name': PowerOnVM_Task, 'duration_secs': 0.697429} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.274944] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.275328] env[68674]: INFO nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Took 8.69 seconds to spawn the instance on the hypervisor. 
[ 824.275681] env[68674]: DEBUG nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.277404] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c648de-2a24-4636-833a-f6d088dc17fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.369478] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "040d7108-8da1-4914-b7fd-03cf09ec68aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.369722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "040d7108-8da1-4914-b7fd-03cf09ec68aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.371061] env[68674]: DEBUG oslo_vmware.api [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240110, 'name': PowerOnVM_Task, 'duration_secs': 1.27761} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.371898] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.372141] env[68674]: DEBUG nova.compute.manager [None req-f7a8d801-f296-4d07-b34a-3eadcb23bc8e tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.372806] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a02cfd-1d8b-4d78-a0ef-43c8e1e933f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.493010] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bca91cbe-540d-48a7-912a-2085c2495501 tempest-VolumesAssistedSnapshotsTest-1742601098 tempest-VolumesAssistedSnapshotsTest-1742601098-project-member] Lock "503e9328-bbd8-414f-8bea-250ed8247d67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.140s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.549795] env[68674]: DEBUG oslo_vmware.api [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239668} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.549795] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.549795] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.549795] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.549795] env[68674]: INFO nova.compute.manager [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 824.549795] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.549795] env[68674]: DEBUG nova.compute.manager [-] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 824.549795] env[68674]: DEBUG nova.network.neutron [-] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.618997] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.619311] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.619482] env[68674]: DEBUG nova.compute.manager [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.620653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c677602-30c3-4870-88a9-ee4e15c4f831 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.636270] env[68674]: DEBUG nova.compute.manager [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 824.636270] env[68674]: DEBUG nova.objects.instance [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'flavor' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 824.737780] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240115, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.817719] env[68674]: INFO nova.compute.manager [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Took 58.43 seconds to build instance. [ 824.872368] env[68674]: DEBUG nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 824.971127] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "867fd9ca-049f-441a-94bc-af60df598043" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.971466] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "867fd9ca-049f-441a-94bc-af60df598043" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.054105] env[68674]: DEBUG nova.compute.manager [req-f13d0d83-2e41-4b7a-ac5b-bfc39a8efbc9 req-c9052555-f50d-4ca7-89ea-18113123ad86 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Received event network-vif-deleted-a6b5f701-3612-4016-86eb-a45074df2088 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.054712] env[68674]: INFO nova.compute.manager [req-f13d0d83-2e41-4b7a-ac5b-bfc39a8efbc9 req-c9052555-f50d-4ca7-89ea-18113123ad86 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Neutron deleted interface a6b5f701-3612-4016-86eb-a45074df2088; detaching it from the instance and deleting it from the info cache [ 825.054712] env[68674]: DEBUG nova.network.neutron [req-f13d0d83-2e41-4b7a-ac5b-bfc39a8efbc9 req-c9052555-f50d-4ca7-89ea-18113123ad86 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.098907] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffd1d67-8dcf-4392-b90e-25d1b9e949d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.107261] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ea765f-da2f-4dd4-b470-f947245677ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.144761] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd58c0b4-5641-4bc6-866a-18d087566892 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.153593] env[68674]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8149f4e3-a5c5-49c6-8ea6-4acc6650e701 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.168531] env[68674]: DEBUG nova.compute.provider_tree [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.230684] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240115, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612678} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.230943] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e371ae6b-44fd-47ce-9c58-8981e7da5cbc/e371ae6b-44fd-47ce-9c58-8981e7da5cbc.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 825.231206] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 825.231455] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4325bc6d-b455-4b28-ac1e-656fa1ed2207 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.239228] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 825.239228] env[68674]: value = "task-3240116" [ 825.239228] env[68674]: _type = "Task" [ 825.239228] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.248292] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240116, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.320849] env[68674]: DEBUG oslo_concurrency.lockutils [None req-faa6117e-dcc7-40de-a763-1504e92e2528 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.198s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.395916] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.475150] env[68674]: DEBUG nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 825.487236] env[68674]: DEBUG nova.network.neutron [-] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.560146] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be2dc8b2-2b57-46c0-b8db-092aa05f2a2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.573185] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0a396f-4a70-40bc-bccd-96fdccdd97dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.611081] env[68674]: DEBUG nova.compute.manager [req-f13d0d83-2e41-4b7a-ac5b-bfc39a8efbc9 req-c9052555-f50d-4ca7-89ea-18113123ad86 service nova] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Detach interface failed, port_id=a6b5f701-3612-4016-86eb-a45074df2088, reason: Instance 5bd42044-84f5-4f48-aa97-b7cf990ed35d could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 825.649149] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 825.649476] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dccd7641-8dd2-4881-9f92-47657830a646 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.656633] env[68674]: DEBUG oslo_vmware.api [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 825.656633] env[68674]: value = "task-3240117" [ 825.656633] env[68674]: _type = "Task" [ 825.656633] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.665704] env[68674]: DEBUG oslo_vmware.api [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240117, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.671990] env[68674]: DEBUG nova.scheduler.client.report [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.748988] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176129} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.749322] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 825.750210] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72f16fe-f323-4afe-bf0a-103c32177e03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.773242] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] e371ae6b-44fd-47ce-9c58-8981e7da5cbc/e371ae6b-44fd-47ce-9c58-8981e7da5cbc.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 825.773498] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8a72ca9-c38d-4f6a-815c-3c46e837cc05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.795921] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 825.795921] env[68674]: value = "task-3240118" [ 825.795921] env[68674]: _type = "Task" [ 825.795921] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.803751] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240118, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.936530] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "036fbca7-be6a-43c6-972e-a71524833498" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.936530] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "036fbca7-be6a-43c6-972e-a71524833498" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.998072] env[68674]: INFO nova.compute.manager [-] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Took 1.45 seconds to deallocate network for instance. [ 826.010580] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.158829] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.158829] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.158829] env[68674]: DEBUG nova.compute.manager [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.158829] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ea707a-f69d-4c2e-8e03-9adbfe345a28 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.168793] env[68674]: DEBUG nova.compute.manager [None 
req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 826.171018] env[68674]: DEBUG nova.objects.instance [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'flavor' on Instance uuid a4cb1632-eada-4b10-a66f-64fecf45fd76 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.182162] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.228s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.186786] env[68674]: DEBUG oslo_vmware.api [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240117, 'name': PowerOffVM_Task, 'duration_secs': 0.385993} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.190670] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 38.246s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.190931] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.191160] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 826.191524] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.485s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.193278] env[68674]: INFO nova.compute.claims [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.197034] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 
tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 826.197034] env[68674]: DEBUG nova.compute.manager [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.200309] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcf431a-0330-446c-8ddc-db73e70686e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.208954] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ad5218-ee47-4e72-b015-5b758e91da72 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.212898] env[68674]: INFO nova.scheduler.client.report [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleted allocations for instance 55727bbc-6b65-4e4c-ba4f-8240efbf052a [ 826.228622] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4942bb4a-d6f7-431b-9e1f-965ce4c677f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.246031] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8c5487-9775-475b-930e-b12fa130a195 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.253191] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85486abf-753c-4a40-97f6-5c0ef4b0b56a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.283793] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178171MB free_disk=119GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 826.283945] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.304713] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240118, 'name': ReconfigVM_Task, 'duration_secs': 0.283229} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.304979] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Reconfigured VM instance instance-0000003e to attach disk [datastore2] e371ae6b-44fd-47ce-9c58-8981e7da5cbc/e371ae6b-44fd-47ce-9c58-8981e7da5cbc.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 826.305738] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a12d644-0aa8-4339-ae95-a0a36f94b670 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.330472] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 826.330472] env[68674]: value = "task-3240119" [ 826.330472] env[68674]: _type = "Task" [ 826.330472] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.338751] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240119, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.441340] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.505669] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.717353] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.717676] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ec7661c-0660-4d35-adc6-0d7fa6601dcc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.723454] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a0c0196a-50df-4b97-81df-52758ae61f0e tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "55727bbc-6b65-4e4c-ba4f-8240efbf052a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.988s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.731088] env[68674]: DEBUG oslo_vmware.api [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 826.731088] env[68674]: value = "task-3240120" [ 826.731088] env[68674]: _type = "Task" [ 826.731088] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.738928] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e233d15-f68b-4b2b-9269-261031cc8131 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.119s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.751023] env[68674]: DEBUG oslo_vmware.api [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.848028] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240119, 'name': Rename_Task, 'duration_secs': 0.176291} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.848360] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.848621] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85bf22ba-5d9d-4236-9845-1271c5fb5b83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.855522] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 826.855522] env[68674]: value = "task-3240121" [ 826.855522] env[68674]: _type = "Task" [ 826.855522] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.865114] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.970786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.243459] env[68674]: DEBUG oslo_vmware.api [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240120, 'name': PowerOffVM_Task, 'duration_secs': 0.320467} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.247761] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.248058] env[68674]: DEBUG nova.compute.manager [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.249052] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15914fe-de72-4c67-8073-2bba2afeff30 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.369541] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240121, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.679695] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914f93cd-437d-40fa-86f4-b12c119fbfb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.687906] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b26ed1-c4d7-416f-adb1-45fab717c5f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.721768] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f89bd0-2fda-4e48-96ea-40163d339961 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.727187] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa713925-670f-419b-8d23-823f91665c21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.745197] env[68674]: DEBUG nova.compute.provider_tree [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.766150] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2ea0459d-f246-4fa8-9135-ce0c9af3f50e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.609s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.867423] env[68674]: DEBUG oslo_vmware.api [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240121, 'name': PowerOnVM_Task, 'duration_secs': 0.664417} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.867885] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.868222] env[68674]: INFO nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Took 8.27 seconds to spawn the instance on the hypervisor. 
[ 827.868523] env[68674]: DEBUG nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.869555] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1a5cee-e441-4ad1-ad00-f0c89b74b5d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.248289] env[68674]: DEBUG nova.scheduler.client.report [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.396564] env[68674]: INFO nova.compute.manager [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Took 59.62 seconds to build instance. [ 828.412423] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.412704] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.486906] env[68674]: DEBUG nova.compute.manager [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Stashing vm_state: stopped {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 828.757237] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.757364] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 
7aa58e2f-1202-4252-9c38-ce53084c573f] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 828.764673] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.172s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.764673] env[68674]: DEBUG nova.objects.instance [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lazy-loading 'resources' on Instance uuid 2007222e-e4e5-44b3-bd9e-55b4a2143c3e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.795754] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "f147b483-9384-4fc1-996e-e8fb035c1942" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.796632] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "f147b483-9384-4fc1-996e-e8fb035c1942" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.901141] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5a4c43e4-cc6b-43c9-8302-8842d990450f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.981s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.006826] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.139396] env[68674]: INFO nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Rebuilding instance [ 829.196729] env[68674]: DEBUG nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.200030] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-644d4fef-0f08-4037-9a29-d09dcaa7aa02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.268033] env[68674]: DEBUG nova.compute.utils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.270521] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.272388] env[68674]: DEBUG nova.network.neutron [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.328148] env[68674]: DEBUG nova.policy [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a872f1b7c99f48bd821993386afdf84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22d2d9d6bfcd4f219b02b1356b36123e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 829.403031] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 829.680505] env[68674]: DEBUG nova.network.neutron [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Successfully created port: d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.773171] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 829.844524] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9ad371-8335-461c-bf33-176fc4e6ab5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.852082] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34df90e2-c0ea-4db6-b602-d0eecdcc7251 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.886390] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4e1bd1-98df-4123-8db2-489ac40dc163 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.894489] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da246961-d510-4eb9-82bb-a884ba81bff8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.912515] env[68674]: DEBUG nova.compute.provider_tree [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.930094] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.025226] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.025472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.219033] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.219033] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2695c55-57cd-4447-b392-5a86906b9988 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.232027] env[68674]: 
DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 830.232027] env[68674]: value = "task-3240122" [ 830.232027] env[68674]: _type = "Task" [ 830.232027] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.242771] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 830.244293] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.244293] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a4e6e5-14a9-43bc-ade0-4ccf23f33b20 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.254895] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.254895] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9b97a12-e497-4e8c-acf7-d45d7c5b6c0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.331777] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.331777] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.331777] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore file [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.331777] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dcb383f-d3d1-4106-84af-2fa935b05a38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.338055] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b 
tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 830.338055] env[68674]: value = "task-3240124" [ 830.338055] env[68674]: _type = "Task" [ 830.338055] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.345707] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.422018] env[68674]: DEBUG nova.scheduler.client.report [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.788385] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Start spawning the instance on the hypervisor. 
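The "Inventory has not changed for provider ade3f042-..." entries above carry the full inventory record that the report client compares against Placement. As an illustrative aside only (not code from Nova or Placement), the usable capacity per resource class follows the standard Placement accounting of (total - reserved) * allocation_ratio, which for the record logged above works out as shown in this small sketch:

    # Illustrative sketch only: derive effective capacity from an inventory
    # record shaped like the one logged above. Field names mirror the log;
    # the helper itself is hypothetical, not Nova/Placement code.
    def effective_capacity(inventory):
        out = {}
        for rc, inv in inventory.items():
            # Reserved units are subtracted before the allocation ratio applies.
            out[rc] = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        return out

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    print(effective_capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}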
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 830.825655] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:10:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0763b137-0ce8-4652-8505-6b8377dc2900',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-684543547',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.825939] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.826115] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.826302] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.826439] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.826871] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.826871] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.826994] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.827143] env[68674]: DEBUG nova.virt.hardware [None 
req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.827271] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.827436] env[68674]: DEBUG nova.virt.hardware [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.828344] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c59bbf-bd2c-4d55-b86e-bccdedb36b6b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.837483] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d18cde-e18f-4fa0-8c39-da8497477ff8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.857200] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31296} completed successfully. 
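The nova.virt.hardware entries above show the topology search for a 1-vCPU flavor with no explicit limits: flavor and image limits are 0:0:0, the ceiling defaults to 65536 sockets/cores/threads, and only VirtCPUTopology(cores=1,sockets=1,threads=1) survives. A minimal sketch of that enumeration idea follows; the helper is hypothetical and heavily simplified, not Nova's _get_possible_cpu_topologies:

    import itertools
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Illustrative only: yield socket/core/thread splits whose product
        # equals the vCPU count and which respect the per-dimension ceilings.
        for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                         range(1, min(vcpus, max_cores) + 1),
                                         range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield VirtCPUTopology(s, c, t)

    # For the 1-vCPU flavors in this log there is exactly one candidate:
    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]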
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.857200] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.857200] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 830.857531] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.926023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.163s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.928775] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.014s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.928857] env[68674]: DEBUG nova.objects.instance [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'resources' on Instance uuid 045e54ff-9e2c-4b04-afac-34cb6580cb2c {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.954470] env[68674]: INFO nova.scheduler.client.report [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Deleted allocations for instance 2007222e-e4e5-44b3-bd9e-55b4a2143c3e [ 831.250472] env[68674]: DEBUG nova.compute.manager [req-8cca9deb-dc94-4828-bf4b-9a7dae608633 req-2e77dbd0-a84f-41b4-b1f1-05b74db8eec9 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Received event network-vif-plugged-d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.251649] env[68674]: DEBUG oslo_concurrency.lockutils [req-8cca9deb-dc94-4828-bf4b-9a7dae608633 req-2e77dbd0-a84f-41b4-b1f1-05b74db8eec9 service nova] Acquiring lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.251649] env[68674]: DEBUG oslo_concurrency.lockutils [req-8cca9deb-dc94-4828-bf4b-9a7dae608633 
req-2e77dbd0-a84f-41b4-b1f1-05b74db8eec9 service nova] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.251649] env[68674]: DEBUG oslo_concurrency.lockutils [req-8cca9deb-dc94-4828-bf4b-9a7dae608633 req-2e77dbd0-a84f-41b4-b1f1-05b74db8eec9 service nova] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.251649] env[68674]: DEBUG nova.compute.manager [req-8cca9deb-dc94-4828-bf4b-9a7dae608633 req-2e77dbd0-a84f-41b4-b1f1-05b74db8eec9 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] No waiting events found dispatching network-vif-plugged-d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.251649] env[68674]: WARNING nova.compute.manager [req-8cca9deb-dc94-4828-bf4b-9a7dae608633 req-2e77dbd0-a84f-41b4-b1f1-05b74db8eec9 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Received unexpected event network-vif-plugged-d24d2f1b-cc82-45a9-8d5c-94505a4de39f for instance with vm_state building and task_state spawning. [ 832.053831] env[68674]: DEBUG nova.network.neutron [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Successfully updated port: d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.067039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b93b481-0c89-4ea8-bca5-47a851d7abf4 tempest-ServerRescueTestJSONUnderV235-1851228162 tempest-ServerRescueTestJSONUnderV235-1851228162-project-member] Lock "2007222e-e4e5-44b3-bd9e-55b4a2143c3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.311s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.566904] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.566904] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.567222] env[68674]: DEBUG nova.network.neutron [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.588164] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7509db87-d57d-4085-b4b9-9a952c0ea9ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.597853] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5873618-5f99-4b31-819b-fe28cbf87127 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.642649] env[68674]: 
DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.642649] env[68674]: DEBUG nova.virt.hardware [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.642649] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9d5554-1392-46ec-94d1-e639cbf1aefe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.645923] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2520970b-169e-46bb-9791-c69ba83c4f03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.655710] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a46400-634d-4e7c-9cba-e4ac516a0eda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.660593] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e1c5fe-509b-4e58-91c0-881e51056f04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.676125] env[68674]: DEBUG nova.compute.provider_tree [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.685571] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:45:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14aed01d-1948-4a24-8075-fa23078ec7a6', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.692503] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.694163] env[68674]: DEBUG nova.scheduler.client.report [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 832.697393] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.697846] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71d954c9-7fa6-4876-aab3-6a30702f6f78 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.720117] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.720117] env[68674]: value = "task-3240125" [ 832.720117] env[68674]: _type = "Task" [ 832.720117] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.727636] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240125, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.132932] env[68674]: DEBUG nova.network.neutron [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.200000] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.271s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.202509] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.432s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.204401] env[68674]: INFO nova.compute.claims [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.225043] env[68674]: INFO nova.scheduler.client.report [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted allocations for instance 045e54ff-9e2c-4b04-afac-34cb6580cb2c [ 833.236489] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240125, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.296172] env[68674]: DEBUG nova.compute.manager [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Received event network-changed-d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 833.296490] env[68674]: DEBUG nova.compute.manager [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Refreshing instance network info cache due to event network-changed-d24d2f1b-cc82-45a9-8d5c-94505a4de39f. 
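The req-8cca9deb and req-07bb4788 entries above trace the external-event path: Neutron sends network-vif-plugged / network-changed for port d24d2f1b-..., the compute manager takes the per-instance "-events" lock, and because nothing is waiting on that event yet while the instance is still building, it is logged as unexpected. A rough sketch of that "pop a waiter if one exists, otherwise report the event as unexpected" pattern (hypothetical and heavily simplified, not Nova's InstanceEvents):

    import threading

    class InstanceEventRegistry:
        # Illustrative only: per-instance event waiters keyed by event name.

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # Register interest before triggering the external action.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def dispatch(self, instance_uuid, event_name):
            # Called when the external event arrives; returns True if a
            # waiter was woken, False if the event was unexpected.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                return False   # logged as 'Received unexpected event ...'
            ev.set()
            return True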
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 833.296727] env[68674]: DEBUG oslo_concurrency.lockutils [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] Acquiring lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.336936] env[68674]: DEBUG nova.network.neutron [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.644104] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.644383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.732688] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240125, 'name': CreateVM_Task, 'duration_secs': 0.598561} completed successfully. 
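Each "Waiting for the task: (returnval){ value = \"task-...\" }" block above, followed by "progress is 0%"/"99%" and finally "completed successfully" with a duration_secs, is the generic poll loop around vCenter tasks. A self-contained sketch of that polling pattern, for illustration only (the real driver delegates this to oslo.vmware's wait_for_task, not to this helper):

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        # Illustrative poll loop: get_task_info() is assumed to return an
        # object with .state ('running'|'success'|'error'), .progress and
        # .error; all three names are assumptions of this sketch.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError('task failed: %s' % info.error)
            # e.g. "Task: {'id': ..., 'name': CreateVM_Task} progress is 0%."
            print('progress is %d%%' % info.progress)
            time.sleep(interval)
        raise TimeoutError('task did not complete in %s seconds' % timeout)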
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.732846] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.733539] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.733728] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.734208] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.734290] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a3d1c9-1b6e-4a2a-b894-58f6f68b4114 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.741145] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 833.741145] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52718b1b-66a4-db5d-1e21-54b0035d4b45" [ 833.741145] env[68674]: _type = "Task" [ 833.741145] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.753076] env[68674]: DEBUG oslo_concurrency.lockutils [None req-105f777e-ea9b-472b-87dc-e37a42d58f69 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "045e54ff-9e2c-4b04-afac-34cb6580cb2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.785s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.758020] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52718b1b-66a4-db5d-1e21-54b0035d4b45, 'name': SearchDatastore_Task, 'duration_secs': 0.013916} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.758558] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.758929] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.759188] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.759503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.759503] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.759762] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e87bf807-545f-4ef3-8103-b0d9eb6c3646 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.768281] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.768473] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.769561] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94250255-4a7f-4275-af2b-a003c922ca52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.775713] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 833.775713] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528bda4d-100b-efe4-29a3-b8c998abede5" [ 833.775713] env[68674]: _type = "Task" [ 833.775713] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.784273] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528bda4d-100b-efe4-29a3-b8c998abede5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.840097] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.840323] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Instance network_info: |[{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.840624] env[68674]: DEBUG oslo_concurrency.lockutils [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] Acquired lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.840795] env[68674]: 
DEBUG nova.network.neutron [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Refreshing network info cache for port d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.842233] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:08:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd24d2f1b-cc82-45a9-8d5c-94505a4de39f', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.850337] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.853099] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.853445] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a68fe8d8-0079-4b33-98bf-3206caec49b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.879769] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.879769] env[68674]: value = "task-3240126" [ 833.879769] env[68674]: _type = "Task" [ 833.879769] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.887727] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240126, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.146326] env[68674]: DEBUG nova.network.neutron [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updated VIF entry in instance network info cache for port d24d2f1b-cc82-45a9-8d5c-94505a4de39f. 
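The "Instance VIF info [...]" entries above show what vmops hands to the VM builder for each port: the integration bridge as network_name, the Neutron MAC, and an OpaqueNetwork reference carrying the NSX logical-switch id. The sketch below assembles a dict of that shape from a network_info entry like the one in the cache above; the field names are copied from the log, while the helper itself is hypothetical:

    def vif_info_from_network_info(vif):
        # Illustrative only: map one network_info entry (as logged above)
        # to the structure shown in the 'Instance VIF info' lines.
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],              # e.g. 'fa:16:3e:b3:08:90'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }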
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.146688] env[68674]: DEBUG nova.network.neutron [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.288186] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528bda4d-100b-efe4-29a3-b8c998abede5, 'name': SearchDatastore_Task, 'duration_secs': 0.01039} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.288976] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed38b524-0e36-4079-a468-d69891b448f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.294598] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 834.294598] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5278e539-ec07-a85f-2ccc-2984bfc886ff" [ 834.294598] env[68674]: _type = "Task" [ 834.294598] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.301696] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5278e539-ec07-a85f-2ccc-2984bfc886ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.390848] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240126, 'name': CreateVM_Task, 'duration_secs': 0.374038} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.390994] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.391627] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.391798] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.392274] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 834.392433] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68840f90-e224-4bf5-b014-e58b81e92981 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.397154] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 834.397154] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c6296-4095-c08b-9217-503fc2683ef7" [ 834.397154] env[68674]: _type = "Task" [ 834.397154] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.407398] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c6296-4095-c08b-9217-503fc2683ef7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.649727] env[68674]: DEBUG oslo_concurrency.lockutils [req-07bb4788-6dcd-40b8-b64c-6572aec66f42 req-49cb1b91-8b08-4f95-a3b5-0d97a30b7d81 service nova] Releasing lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.671420] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f758d8a-b4c5-4d3d-bc9d-089438aef42d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.680095] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1132ea9-bb4a-457a-b528-709c0cf98729 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.714671] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9feba5e-1eb5-4172-8673-0d655bd7c10e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.724026] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d850c09c-22c6-400e-b259-24e9af3dbefb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.738371] env[68674]: DEBUG nova.compute.provider_tree [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.805582] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5278e539-ec07-a85f-2ccc-2984bfc886ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010812} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.805913] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.806198] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.806456] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59d113a0-5774-4c05-83be-f3e3129bc1bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.813835] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 834.813835] env[68674]: value = "task-3240127" [ 834.813835] env[68674]: _type = "Task" [ 834.813835] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.822134] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.909903] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c6296-4095-c08b-9217-503fc2683ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.033643} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.910256] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.910529] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.910806] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.910965] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.911181] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.911457] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b883f6a5-5122-4d5c-972d-6dfec9d85072 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.919030] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.919233] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.919995] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3efdc9d-e85e-4af1-9413-bcf4ead248d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.926122] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 834.926122] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5247659b-fc30-cab1-1aa8-7b08ae3dd6d0" [ 834.926122] env[68674]: _type = "Task" [ 834.926122] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.934698] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5247659b-fc30-cab1-1aa8-7b08ae3dd6d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.244227] env[68674]: DEBUG nova.scheduler.client.report [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.323725] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455847} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.324014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.324238] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.324525] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cea7801-0330-4866-9503-fb578fd5a1fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.331713] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 835.331713] env[68674]: value = "task-3240128" [ 835.331713] env[68674]: _type = "Task" [ 835.331713] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.340548] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240128, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.438057] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5247659b-fc30-cab1-1aa8-7b08ae3dd6d0, 'name': SearchDatastore_Task, 'duration_secs': 0.008966} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.438867] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a655cff7-17c9-445f-8bbf-b7521a75b96f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.444743] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 835.444743] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d22e6-25ff-f4ae-c305-1001559289c7" [ 835.444743] env[68674]: _type = "Task" [ 835.444743] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.453946] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d22e6-25ff-f4ae-c305-1001559289c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.749908] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.751046] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 835.753113] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.965s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.754642] env[68674]: INFO nova.compute.claims [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.841322] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13632} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.841602] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.842424] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bccd4b-c80f-4df9-a909-25482442028d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.864298] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.864582] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c202f5c2-74b3-4543-a24f-e819edd33573 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.884473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.884725] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "0e7c5243-ad33-4391-8977-b9019643e3de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.888492] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 835.888492] env[68674]: value = "task-3240129" [ 835.888492] env[68674]: _type = "Task" [ 835.888492] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.896528] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240129, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.954400] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d22e6-25ff-f4ae-c305-1001559289c7, 'name': SearchDatastore_Task, 'duration_secs': 0.00878} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.954697] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.955691] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.955691] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e307bf6-0952-4905-a039-dacdbd1c0fa2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.961800] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 835.961800] env[68674]: value = "task-3240130" [ 835.961800] env[68674]: _type = "Task" [ 835.961800] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.969396] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.262296] env[68674]: DEBUG nova.compute.utils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.264033] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.264157] env[68674]: DEBUG nova.network.neutron [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.303301] env[68674]: DEBUG nova.policy [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '61e4c79ab4ff4f149992c5542d3f6ab7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b36ad45ac45e43f9880778908b6ffdf1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.400867] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240129, 'name': ReconfigVM_Task, 'duration_secs': 0.27945} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.401159] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Reconfigured VM instance instance-0000003d to attach disk [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76/a4cb1632-eada-4b10-a66f-64fecf45fd76.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.402251] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea465587-b3f0-4c64-a3a0-e4afae103ba9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.409332] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 836.409332] env[68674]: value = "task-3240131" [ 836.409332] env[68674]: _type = "Task" [ 836.409332] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.417749] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240131, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.471492] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460905} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.471769] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.471985] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.472285] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2394b31c-a9c7-431e-a390-f915f92dee32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.478637] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 836.478637] env[68674]: value = "task-3240132" [ 836.478637] env[68674]: _type = "Task" [ 836.478637] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.487152] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240132, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.629485] env[68674]: DEBUG nova.network.neutron [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Successfully created port: e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.767840] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 836.919225] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240131, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.991049] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240132, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067646} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.993656] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.994727] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57074f7b-c33a-4203-a3dc-1469707a56e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.017802] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.020346] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0accac2-6db6-4e50-8c5a-bac101cf1eb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.040180] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 837.040180] env[68674]: value = "task-3240133" [ 837.040180] env[68674]: _type = "Task" [ 837.040180] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.050164] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240133, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.258066] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec44ff9-f702-4d44-9110-0b754ffcd123 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.265655] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944126f8-7f5e-4673-adc6-f265a1c9e0f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.299653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb381205-a471-4735-9bc0-3d697ba3180c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.307161] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7a305a-c1a4-4852-96fe-f3a3e42aae6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.324102] env[68674]: DEBUG nova.compute.provider_tree [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.419936] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240131, 'name': Rename_Task, 'duration_secs': 0.876913} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.420238] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.420476] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb458f2d-dc38-435e-8c70-3377e534702e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.427156] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 837.427156] env[68674]: value = "task-3240134" [ 837.427156] env[68674]: _type = "Task" [ 837.427156] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.434650] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240134, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.549402] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240133, 'name': ReconfigVM_Task, 'duration_secs': 0.276795} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.549677] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.550342] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b93f8d0-4c77-4617-a8b6-187525788039 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.557132] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 837.557132] env[68674]: value = "task-3240135" [ 837.557132] env[68674]: _type = "Task" [ 837.557132] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.565179] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240135, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.800539] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.827013] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.827321] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.827497] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.827680] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.827825] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.827970] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.828197] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.828411] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 837.828587] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.828753] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.828923] env[68674]: DEBUG nova.virt.hardware [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.829745] env[68674]: DEBUG nova.scheduler.client.report [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.833673] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e25ff4c-694e-491d-941e-5131d2456878 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.841965] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfc3c80-bfce-4cbc-bc16-e0ab9e89222b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.937503] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240134, 'name': PowerOnVM_Task, 'duration_secs': 0.468042} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.937733] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.937938] env[68674]: DEBUG nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.938742] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f38caf-3bfd-418e-abb7-9ad27c5c7af9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.069040] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240135, 'name': Rename_Task, 'duration_secs': 0.140347} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.069040] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.069040] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-106eab74-5b11-4815-a03a-0946f011e25c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.073730] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 838.073730] env[68674]: value = "task-3240136" [ 838.073730] env[68674]: _type = "Task" [ 838.073730] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.085671] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240136, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.339281] env[68674]: DEBUG nova.compute.manager [req-94c846ba-c44f-4ee7-a99e-c9e3f4eddff8 req-2a1b9358-3c67-4a57-961a-00b493952df9 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Received event network-vif-plugged-e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 838.339281] env[68674]: DEBUG oslo_concurrency.lockutils [req-94c846ba-c44f-4ee7-a99e-c9e3f4eddff8 req-2a1b9358-3c67-4a57-961a-00b493952df9 service nova] Acquiring lock "2ed83aff-9a73-464b-914a-479d91cdfce0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.339281] env[68674]: DEBUG oslo_concurrency.lockutils [req-94c846ba-c44f-4ee7-a99e-c9e3f4eddff8 req-2a1b9358-3c67-4a57-961a-00b493952df9 service nova] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.339281] env[68674]: DEBUG oslo_concurrency.lockutils [req-94c846ba-c44f-4ee7-a99e-c9e3f4eddff8 req-2a1b9358-3c67-4a57-961a-00b493952df9 service nova] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.339281] env[68674]: DEBUG nova.compute.manager [req-94c846ba-c44f-4ee7-a99e-c9e3f4eddff8 req-2a1b9358-3c67-4a57-961a-00b493952df9 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] No waiting events found dispatching network-vif-plugged-e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 838.339281] env[68674]: WARNING nova.compute.manager [req-94c846ba-c44f-4ee7-a99e-c9e3f4eddff8 req-2a1b9358-3c67-4a57-961a-00b493952df9 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Received unexpected event network-vif-plugged-e952a804-62fd-4a82-b6de-457e739b719a for instance with vm_state building and task_state spawning. [ 838.339940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.340542] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 838.343796] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.008s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.345270] env[68674]: INFO nova.compute.claims [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.420290] env[68674]: DEBUG nova.network.neutron [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Successfully updated port: e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.450251] env[68674]: INFO nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] bringing vm to original state: 'stopped' [ 838.583979] env[68674]: DEBUG oslo_vmware.api [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240136, 'name': PowerOnVM_Task, 'duration_secs': 0.442789} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.584268] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.584527] env[68674]: INFO nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Took 7.80 seconds to spawn the instance on the hypervisor. 
[ 838.584711] env[68674]: DEBUG nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.585474] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b51b80-cbce-4857-b31e-1c8f6938fdb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.845384] env[68674]: DEBUG nova.compute.utils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.846812] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 838.846986] env[68674]: DEBUG nova.network.neutron [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.892567] env[68674]: DEBUG nova.policy [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e07f15f69a9e482784c39b9c7cda8a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bca98e5a30741249b1bdee899ffe433', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 838.922122] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "refresh_cache-2ed83aff-9a73-464b-914a-479d91cdfce0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.922296] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquired lock "refresh_cache-2ed83aff-9a73-464b-914a-479d91cdfce0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.922450] env[68674]: DEBUG nova.network.neutron [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Building network info cache for instance {{(pid=68674) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 839.104020] env[68674]: INFO nova.compute.manager [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Took 50.43 seconds to build instance. [ 839.187957] env[68674]: DEBUG nova.network.neutron [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Successfully created port: 2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.351076] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 839.457473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.457473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.457676] env[68674]: DEBUG nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.458434] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069e4533-0764-4760-9d3c-4333cd9b59ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.466905] env[68674]: DEBUG nova.network.neutron [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.468910] env[68674]: DEBUG nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 839.608116] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1cc267af-f1d2-4575-87db-b939dddfd4f0 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.946s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.629727] env[68674]: DEBUG nova.network.neutron [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Updating instance_info_cache with network_info: [{"id": "e952a804-62fd-4a82-b6de-457e739b719a", "address": "fa:16:3e:5b:5a:d5", "network": {"id": "34c5cb42-0035-4a58-9419-79297b94c2e8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1814730982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b36ad45ac45e43f9880778908b6ffdf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape952a804-62", "ovs_interfaceid": "e952a804-62fd-4a82-b6de-457e739b719a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.812260] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb79218b-442c-46bf-990c-d252781bf07c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.822139] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56757347-6c9d-4eff-952f-aa280bbb2b76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.862848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7850cb-1f5c-43ac-a322-68b235b7cd52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.871505] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1e63d57c-726c-42fd-a0e4-362d56fb5402 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.887482] env[68674]: DEBUG nova.compute.provider_tree [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.975762] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.976085] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8d48ae8-0c49-4358-b9d0-7c30045a7392 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.983629] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 839.983629] env[68674]: value = "task-3240137" [ 839.983629] env[68674]: _type = "Task" [ 839.983629] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.991527] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240137, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.108401] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.132859] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Releasing lock "refresh_cache-2ed83aff-9a73-464b-914a-479d91cdfce0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.133326] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Instance network_info: |[{"id": "e952a804-62fd-4a82-b6de-457e739b719a", "address": "fa:16:3e:5b:5a:d5", "network": {"id": "34c5cb42-0035-4a58-9419-79297b94c2e8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1814730982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b36ad45ac45e43f9880778908b6ffdf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape952a804-62", "ovs_interfaceid": "e952a804-62fd-4a82-b6de-457e739b719a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.133765] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:5a:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f408ce42-3cac-4d9d-9c05-15471d653a18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e952a804-62fd-4a82-b6de-457e739b719a', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.142696] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Creating folder: Project (b36ad45ac45e43f9880778908b6ffdf1). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.142696] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f5478ac-245f-4b50-aae5-1f5bb527be49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.154638] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Created folder: Project (b36ad45ac45e43f9880778908b6ffdf1) in parent group-v647377. [ 840.154780] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Creating folder: Instances. Parent ref: group-v647567. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.155037] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5011dfab-7b2d-482c-971c-f8d45b925ad5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.166546] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Created folder: Instances in parent group-v647567. [ 840.166808] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 840.167016] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.167242] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-411bf0c6-6d18-44ce-92a4-0b67b124e77d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.189119] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.189119] env[68674]: value = "task-3240140" [ 840.189119] env[68674]: _type = "Task" [ 840.189119] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.199766] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240140, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.368975] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 840.377939] env[68674]: DEBUG nova.compute.manager [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Received event network-changed-e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.378253] env[68674]: DEBUG nova.compute.manager [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Refreshing instance network info cache due to event network-changed-e952a804-62fd-4a82-b6de-457e739b719a. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 840.378775] env[68674]: DEBUG oslo_concurrency.lockutils [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] Acquiring lock "refresh_cache-2ed83aff-9a73-464b-914a-479d91cdfce0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.378775] env[68674]: DEBUG oslo_concurrency.lockutils [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] Acquired lock "refresh_cache-2ed83aff-9a73-464b-914a-479d91cdfce0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.378866] env[68674]: DEBUG nova.network.neutron [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Refreshing network info cache for port e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.392281] env[68674]: DEBUG nova.scheduler.client.report [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.405533] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 840.405735] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.405936] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 840.407869] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.408265] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 840.408332] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 840.408605] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 840.408811] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 840.409029] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 840.410301] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 840.410301] env[68674]: DEBUG nova.virt.hardware [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 840.410793] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e0c024-a10e-4f69-9fd1-af3889cece5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.422584] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76def6ef-88fa-475c-9722-dbe2b398ebab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.493204] env[68674]: DEBUG oslo_vmware.api [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240137, 'name': PowerOffVM_Task, 'duration_secs': 0.206644} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.497062] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.497062] env[68674]: DEBUG nova.compute.manager [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.497062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3de0dd-2759-4fe2-9ccb-614fb996772c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.633063] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.699259] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240140, 'name': CreateVM_Task, 'duration_secs': 0.351906} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.699259] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.699861] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.700068] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.700404] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 840.700689] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5bf8e63-ab0d-4164-a0c0-b78e86e0e02d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.706656] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 840.706656] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d3a82d-b278-43f4-bcfd-83a7db441478" [ 840.706656] env[68674]: _type = "Task" [ 840.706656] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.717165] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d3a82d-b278-43f4-bcfd-83a7db441478, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.817322] env[68674]: DEBUG nova.network.neutron [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Successfully updated port: 2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.899616] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.900269] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 840.902830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.256s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.904598] env[68674]: INFO nova.compute.claims [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.010517] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.553s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.130622] env[68674]: DEBUG nova.network.neutron [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Updated VIF entry in instance network info cache for port e952a804-62fd-4a82-b6de-457e739b719a. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.131096] env[68674]: DEBUG nova.network.neutron [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Updating instance_info_cache with network_info: [{"id": "e952a804-62fd-4a82-b6de-457e739b719a", "address": "fa:16:3e:5b:5a:d5", "network": {"id": "34c5cb42-0035-4a58-9419-79297b94c2e8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1814730982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b36ad45ac45e43f9880778908b6ffdf1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape952a804-62", "ovs_interfaceid": "e952a804-62fd-4a82-b6de-457e739b719a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.218087] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d3a82d-b278-43f4-bcfd-83a7db441478, 'name': SearchDatastore_Task, 'duration_secs': 0.021813} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.218401] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.218761] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.218878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.219035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.219221] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.219490] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e1f37be-f19f-43a5-8300-4f08ab9d2125 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.229279] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.229494] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.230298] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3422d40d-4afe-4987-96a6-cab29bfad2fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.235543] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 841.235543] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c19050-148e-3cbc-9d4d-8cc1135b63f8" [ 841.235543] env[68674]: _type = "Task" [ 841.235543] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.243441] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c19050-148e-3cbc-9d4d-8cc1135b63f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.269438] env[68674]: DEBUG nova.compute.manager [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 841.319681] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.319842] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.319998] env[68674]: DEBUG nova.network.neutron [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.411763] env[68674]: DEBUG nova.compute.utils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 841.415263] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 841.415263] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.469120] env[68674]: DEBUG nova.policy [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26dfab4e06f2403697a602676410bbe7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a52a0d746a4f16b5e4d5a6c984cc45', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 841.520903] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.634400] env[68674]: DEBUG oslo_concurrency.lockutils [req-4f7888c0-eecd-41f5-b7cc-5840f05984b6 req-f120dadc-c109-4307-a5eb-147a6af0a9b0 service nova] Releasing lock "refresh_cache-2ed83aff-9a73-464b-914a-479d91cdfce0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.746649] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c19050-148e-3cbc-9d4d-8cc1135b63f8, 'name': SearchDatastore_Task, 'duration_secs': 0.010782} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.747505] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd582aaf-f1c7-4f6b-8f26-b4416b663df6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.753362] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 841.753362] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521ebb5b-0982-9609-ae97-10d9b30a22a9" [ 841.753362] env[68674]: _type = "Task" [ 841.753362] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.761723] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521ebb5b-0982-9609-ae97-10d9b30a22a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.794684] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.839103] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Successfully created port: 9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.882328] env[68674]: DEBUG nova.network.neutron [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.915637] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 842.048126] env[68674]: DEBUG nova.network.neutron [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": "2cf52206-a0c1-4b57-886d-23df69181f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.265462] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521ebb5b-0982-9609-ae97-10d9b30a22a9, 'name': SearchDatastore_Task, 'duration_secs': 0.031091} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.270258] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.270258] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2ed83aff-9a73-464b-914a-479d91cdfce0/2ed83aff-9a73-464b-914a-479d91cdfce0.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.270258] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-244b3317-3ac1-4d5e-96e9-f5e4ecaa35cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.278374] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 842.278374] env[68674]: value = "task-3240141" [ 842.278374] env[68674]: _type = "Task" [ 842.278374] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.290458] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240141, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.412978] env[68674]: DEBUG nova.compute.manager [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-vif-plugged-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.413367] env[68674]: DEBUG oslo_concurrency.lockutils [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.413464] env[68674]: DEBUG oslo_concurrency.lockutils [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.413567] env[68674]: DEBUG oslo_concurrency.lockutils [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.413734] env[68674]: DEBUG nova.compute.manager [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] No waiting events found dispatching network-vif-plugged-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.413896] env[68674]: WARNING nova.compute.manager [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received unexpected event network-vif-plugged-2cf52206-a0c1-4b57-886d-23df69181f20 for instance with vm_state building and task_state spawning. [ 842.414065] env[68674]: DEBUG nova.compute.manager [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.414219] env[68674]: DEBUG nova.compute.manager [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing instance network info cache due to event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.414383] env[68674]: DEBUG oslo_concurrency.lockutils [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.426772] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93298d96-e168-47a7-8f81-dc5c04b7aa7f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.435194] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffdd353-a0b0-415d-8f79-eb7568a069bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.469593] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0da6c5-7d78-461b-9afa-e31f9bf34e0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.478639] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153a4893-a698-423d-b893-ca8c0a9b4b12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.492797] env[68674]: DEBUG nova.compute.provider_tree [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.551038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.551038] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance network_info: |[{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": 
"2cf52206-a0c1-4b57-886d-23df69181f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.551255] env[68674]: DEBUG oslo_concurrency.lockutils [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.551348] env[68674]: DEBUG nova.network.neutron [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.552623] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:8f:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721c6720-3ce0-450e-9951-a894f03acc27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cf52206-a0c1-4b57-886d-23df69181f20', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.560574] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating folder: Project (2bca98e5a30741249b1bdee899ffe433). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.561576] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a24e99a6-a713-4528-bc9e-6c9243c01f2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.568908] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.569471] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.569471] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "a4cb1632-eada-4b10-a66f-64fecf45fd76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.569628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.569855] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.572984] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created folder: Project (2bca98e5a30741249b1bdee899ffe433) in parent group-v647377. [ 842.573227] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating folder: Instances. Parent ref: group-v647570. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.573737] env[68674]: INFO nova.compute.manager [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Terminating instance [ 842.575091] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-503a12dc-3e8c-455a-96f8-7ccf2d1355e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.584895] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created folder: Instances in parent group-v647570. [ 842.585164] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.585966] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.586256] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3c21d2f-9668-4987-9aae-1aa655f8aaa8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.610077] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.610077] env[68674]: value = "task-3240144" [ 842.610077] env[68674]: _type = "Task" [ 842.610077] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.618542] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240144, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.793971] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240141, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.931283] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 842.962152] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 842.962762] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.963145] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 842.963547] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.964154] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 842.964154] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 842.964447] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 842.964616] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 842.964789] env[68674]: DEBUG nova.virt.hardware [None 
req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 842.964949] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 842.965142] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 842.966015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d09c0c5-802a-4111-96a2-e55a454ad7d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.976152] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f1d83d-10b4-4f03-8d25-0fbe4458944e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.995735] env[68674]: DEBUG nova.scheduler.client.report [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.079957] env[68674]: DEBUG nova.compute.manager [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 843.079957] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.081384] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43d0f18-8387-42db-ad4a-9940b442e672 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.088764] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.089852] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc91effa-742f-4e54-89f8-ec1f6ea552a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.119820] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240144, 'name': CreateVM_Task, 'duration_secs': 0.496623} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.120015] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.120688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.120866] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.121195] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 843.121852] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7445793-03be-48db-9507-43226a7bcd83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.126407] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 
843.126407] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b9002f-64ed-22d4-0341-ac4f76546490" [ 843.126407] env[68674]: _type = "Task" [ 843.126407] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.134061] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b9002f-64ed-22d4-0341-ac4f76546490, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.155383] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.155601] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.155783] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore file [datastore2] a4cb1632-eada-4b10-a66f-64fecf45fd76 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.156042] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7a45b5a-1c9e-4387-a5f0-56280f1a4d68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.162100] env[68674]: DEBUG oslo_vmware.api [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 843.162100] env[68674]: value = "task-3240146" [ 843.162100] env[68674]: _type = "Task" [ 843.162100] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.172188] env[68674]: DEBUG oslo_vmware.api [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240146, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.295488] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634325} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.296081] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2ed83aff-9a73-464b-914a-479d91cdfce0/2ed83aff-9a73-464b-914a-479d91cdfce0.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.296933] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.297335] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eebb2f82-b989-47ad-8ae8-7a3aab23b4e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.305125] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 843.305125] env[68674]: value = "task-3240147" [ 843.305125] env[68674]: _type = "Task" [ 843.305125] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.313504] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240147, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.326431] env[68674]: DEBUG nova.network.neutron [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updated VIF entry in instance network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.326782] env[68674]: DEBUG nova.network.neutron [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": "2cf52206-a0c1-4b57-886d-23df69181f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.499706] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Successfully updated port: 9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.506327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.506327] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.506401] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.164s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.506569] env[68674]: DEBUG nova.objects.instance [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 843.636065] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b9002f-64ed-22d4-0341-ac4f76546490, 'name': SearchDatastore_Task, 'duration_secs': 0.009737} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.636362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.636802] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.636875] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.636965] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.637155] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.637400] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-7b54cd9b-59ed-4113-a4ce-4b04d2221865 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.654312] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.654490] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.655200] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acf5117a-8475-4e24-8365-14bcb808b483 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.660119] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 843.660119] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527017d3-3235-de68-be49-3c82cbd58063" [ 843.660119] env[68674]: _type = "Task" [ 843.660119] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.669812] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527017d3-3235-de68-be49-3c82cbd58063, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.672585] env[68674]: DEBUG oslo_vmware.api [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240146, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130993} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.672804] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.672981] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.673216] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.673401] env[68674]: INFO nova.compute.manager [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Took 0.59 seconds to destroy the instance on the hypervisor. [ 843.673626] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 843.673803] env[68674]: DEBUG nova.compute.manager [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 843.673895] env[68674]: DEBUG nova.network.neutron [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.814127] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055733} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.814538] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.815183] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ee58ea-13bc-4288-9df9-165e632e535b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.836518] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 2ed83aff-9a73-464b-914a-479d91cdfce0/2ed83aff-9a73-464b-914a-479d91cdfce0.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.837068] env[68674]: DEBUG oslo_concurrency.lockutils [req-028a8eb7-ae71-4b09-89a1-a32f564d2623 req-bc2e2da5-f4b3-483a-b941-e20c69353362 service nova] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.837431] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-015479d6-8a07-4f44-9a5e-c2a23b5f750c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.857143] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 843.857143] env[68674]: value = "task-3240148" [ 843.857143] env[68674]: _type = "Task" [ 843.857143] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.864741] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240148, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.002120] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "refresh_cache-f3e7cacd-20d3-4dbe-89b0-80d89173069a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.002329] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "refresh_cache-f3e7cacd-20d3-4dbe-89b0-80d89173069a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.002493] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.010292] env[68674]: DEBUG nova.compute.utils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.016964] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.016964] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.054588] env[68674]: DEBUG nova.policy [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26dfab4e06f2403697a602676410bbe7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a52a0d746a4f16b5e4d5a6c984cc45', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.170576] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527017d3-3235-de68-be49-3c82cbd58063, 'name': SearchDatastore_Task, 'duration_secs': 0.081445} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.171383] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26346c0e-8aae-4d82-a8e1-6c9f142b041a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.177046] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 844.177046] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ff5b5c-2e60-eb59-b08e-89932ab1d129" [ 844.177046] env[68674]: _type = "Task" [ 844.177046] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.184709] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ff5b5c-2e60-eb59-b08e-89932ab1d129, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.319081] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Successfully created port: d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.367570] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240148, 'name': ReconfigVM_Task, 'duration_secs': 0.306895} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.367868] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 2ed83aff-9a73-464b-914a-479d91cdfce0/2ed83aff-9a73-464b-914a-479d91cdfce0.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.368880] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35a5e74f-fe4c-454d-93e3-3cd4f8209bba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.374736] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 844.374736] env[68674]: value = "task-3240149" [ 844.374736] env[68674]: _type = "Task" [ 844.374736] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.383649] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240149, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.412234] env[68674]: DEBUG nova.network.neutron [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.481111] env[68674]: DEBUG nova.compute.manager [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Received event network-vif-plugged-9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.481287] env[68674]: DEBUG oslo_concurrency.lockutils [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] Acquiring lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.481488] env[68674]: DEBUG oslo_concurrency.lockutils [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.481647] env[68674]: DEBUG oslo_concurrency.lockutils [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.481835] env[68674]: DEBUG nova.compute.manager [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] No waiting events found dispatching network-vif-plugged-9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 844.481966] env[68674]: WARNING nova.compute.manager [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Received unexpected event network-vif-plugged-9cbcb250-c330-4129-b949-513c43ad2f8d for instance with vm_state building and task_state spawning. 
[ 844.482162] env[68674]: DEBUG nova.compute.manager [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Received event network-changed-9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.482323] env[68674]: DEBUG nova.compute.manager [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Refreshing instance network info cache due to event network-changed-9cbcb250-c330-4129-b949-513c43ad2f8d. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 844.482490] env[68674]: DEBUG oslo_concurrency.lockutils [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] Acquiring lock "refresh_cache-f3e7cacd-20d3-4dbe-89b0-80d89173069a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.519395] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97b0cfff-3e46-4dab-b4e6-ff86377bf3c1 tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.520662] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.527443] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.953s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.527677] env[68674]: DEBUG nova.objects.instance [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lazy-loading 'resources' on Instance uuid 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.551866] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.686572] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ff5b5c-2e60-eb59-b08e-89932ab1d129, 'name': SearchDatastore_Task, 'duration_secs': 0.080079} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.686831] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.687097] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.687354] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5c32061-d9a0-4f10-9a54-540e1eda4f3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.693363] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 844.693363] env[68674]: value = "task-3240150" [ 844.693363] env[68674]: _type = "Task" [ 844.693363] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.700875] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240150, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.702836] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Updating instance_info_cache with network_info: [{"id": "9cbcb250-c330-4129-b949-513c43ad2f8d", "address": "fa:16:3e:6f:97:07", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbcb250-c3", "ovs_interfaceid": "9cbcb250-c330-4129-b949-513c43ad2f8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.885236] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240149, 'name': Rename_Task, 'duration_secs': 0.134921} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.885538] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.885801] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64b90c28-ed72-47d2-b0fb-ad6c0e2ab58b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.892157] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 844.892157] env[68674]: value = "task-3240151" [ 844.892157] env[68674]: _type = "Task" [ 844.892157] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.900194] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.914973] env[68674]: INFO nova.compute.manager [-] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Took 1.24 seconds to deallocate network for instance. [ 845.206363] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "refresh_cache-f3e7cacd-20d3-4dbe-89b0-80d89173069a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.206696] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Instance network_info: |[{"id": "9cbcb250-c330-4129-b949-513c43ad2f8d", "address": "fa:16:3e:6f:97:07", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbcb250-c3", "ovs_interfaceid": "9cbcb250-c330-4129-b949-513c43ad2f8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 845.206990] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240150, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.210311] env[68674]: DEBUG oslo_concurrency.lockutils [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] Acquired lock "refresh_cache-f3e7cacd-20d3-4dbe-89b0-80d89173069a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.210508] env[68674]: DEBUG nova.network.neutron [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Refreshing network info cache for port 9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.211731] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:97:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48512b02-ad5c-4105-ba7d-fd4775acf8e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cbcb250-c330-4129-b949-513c43ad2f8d', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.221343] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 845.225189] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.225728] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33e6d304-a89b-40f6-8e4e-993e84041164 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.249418] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.249418] env[68674]: value = "task-3240152" [ 845.249418] env[68674]: _type = "Task" [ 845.249418] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.260025] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240152, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.407238] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240151, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.424142] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.482415] env[68674]: DEBUG nova.network.neutron [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Updated VIF entry in instance network info cache for port 9cbcb250-c330-4129-b949-513c43ad2f8d. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.482923] env[68674]: DEBUG nova.network.neutron [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Updating instance_info_cache with network_info: [{"id": "9cbcb250-c330-4129-b949-513c43ad2f8d", "address": "fa:16:3e:6f:97:07", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cbcb250-c3", "ovs_interfaceid": "9cbcb250-c330-4129-b949-513c43ad2f8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.538077] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.575860] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.576070] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.576248] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.576493] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.576691] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.577309] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.577309] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 845.577309] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.577498] env[68674]: DEBUG nova.virt.hardware [None 
req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.578199] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.578199] env[68674]: DEBUG nova.virt.hardware [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.578692] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02427a8e-799b-4e20-bea1-1558bcd4b2fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.588886] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6e9cdd-f288-4abb-8f9a-11b2a2d59e27 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.607093] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccb8490-58ad-4ca9-bed7-d1168a0a7616 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.614292] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e346da6f-3ef4-4914-af6d-56e6f4993f6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.645543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f8cea9-fe74-461c-9154-6d776a924d50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.653179] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80501e2-1d9e-42e5-b886-ed14faf0ca44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.666484] env[68674]: DEBUG nova.compute.provider_tree [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.705289] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240150, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531542} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.705289] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.705450] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.705664] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4449ee43-c8d5-4546-9817-4a1c02f5615a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.711548] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 845.711548] env[68674]: value = "task-3240153" [ 845.711548] env[68674]: _type = "Task" [ 845.711548] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.721011] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240153, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.757590] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240152, 'name': CreateVM_Task, 'duration_secs': 0.438101} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.757759] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.758443] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.758676] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.759097] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.759349] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8f2fdab-6e12-451c-b497-e07d7d11c92c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.763869] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 845.763869] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522856c7-6e60-bc20-0f00-3b1534fc3fd9" [ 845.763869] env[68674]: _type = "Task" [ 845.763869] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.771638] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522856c7-6e60-bc20-0f00-3b1534fc3fd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.893472] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Successfully updated port: d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.905077] env[68674]: DEBUG oslo_vmware.api [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240151, 'name': PowerOnVM_Task, 'duration_secs': 0.733893} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.905409] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.905722] env[68674]: INFO nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Took 8.10 seconds to spawn the instance on the hypervisor. [ 845.905722] env[68674]: DEBUG nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.907146] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e222f349-724d-456e-a62e-a0faacd15fd1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.986042] env[68674]: DEBUG oslo_concurrency.lockutils [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] Releasing lock "refresh_cache-f3e7cacd-20d3-4dbe-89b0-80d89173069a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.986347] env[68674]: DEBUG nova.compute.manager [req-207866a8-3593-4151-abd5-fc23a8792a81 req-3207d5a4-010e-4c84-9848-ed51e759099d service nova] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Received event network-vif-deleted-14aed01d-1948-4a24-8075-fa23078ec7a6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.169805] env[68674]: DEBUG nova.scheduler.client.report [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.222873] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.281466} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.223157] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.224032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab62b01-9a3a-4299-88a8-609cfda49e19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.246726] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.247013] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-422f9e18-a299-4869-8fa4-6ab1184501ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.269114] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 846.269114] env[68674]: value = "task-3240154" [ 846.269114] env[68674]: _type = "Task" [ 846.269114] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.276193] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522856c7-6e60-bc20-0f00-3b1534fc3fd9, 'name': SearchDatastore_Task, 'duration_secs': 0.039393} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.276813] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.277064] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.277302] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.277450] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.277628] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.277889] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07881b22-b2d0-43ab-9d2e-dc76d1e22406 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.282871] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240154, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.292022] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.292256] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.293097] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0312cf21-9636-416d-8635-8ebd0a5f1b6e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.298661] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 846.298661] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52074a65-a970-095a-b914-6a9dc760d01a" [ 846.298661] env[68674]: _type = "Task" [ 846.298661] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.307138] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52074a65-a970-095a-b914-6a9dc760d01a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.400357] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "refresh_cache-3a0a7950-af31-4a20-a19d-44fbce8735a2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.400526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "refresh_cache-3a0a7950-af31-4a20-a19d-44fbce8735a2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.400696] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.424948] env[68674]: INFO nova.compute.manager [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Took 45.68 seconds to build instance. 
[ 846.512535] env[68674]: DEBUG nova.compute.manager [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Received event network-vif-plugged-d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.512777] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] Acquiring lock "3a0a7950-af31-4a20-a19d-44fbce8735a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.512976] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.513191] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.513372] env[68674]: DEBUG nova.compute.manager [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] No waiting events found dispatching network-vif-plugged-d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 846.513534] env[68674]: WARNING nova.compute.manager [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Received unexpected event network-vif-plugged-d1611897-c41e-41b8-a60f-5c6f36a7028f for instance with vm_state building and task_state spawning. [ 846.513690] env[68674]: DEBUG nova.compute.manager [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Received event network-changed-d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.513840] env[68674]: DEBUG nova.compute.manager [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Refreshing instance network info cache due to event network-changed-d1611897-c41e-41b8-a60f-5c6f36a7028f. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 846.514033] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] Acquiring lock "refresh_cache-3a0a7950-af31-4a20-a19d-44fbce8735a2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.675659] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.678299] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.549s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.678540] env[68674]: DEBUG nova.objects.instance [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lazy-loading 'resources' on Instance uuid 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.697443] env[68674]: INFO nova.scheduler.client.report [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Deleted allocations for instance 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f [ 846.785834] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240154, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.817454] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52074a65-a970-095a-b914-6a9dc760d01a, 'name': SearchDatastore_Task, 'duration_secs': 0.022096} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.818912] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d354b05-21a5-488b-a1d5-9a8e216286b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.827331] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 846.827331] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52215a31-e0f9-c84a-87ba-3d50207e2587" [ 846.827331] env[68674]: _type = "Task" [ 846.827331] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.842024] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52215a31-e0f9-c84a-87ba-3d50207e2587, 'name': SearchDatastore_Task, 'duration_secs': 0.009712} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.842585] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.842852] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f3e7cacd-20d3-4dbe-89b0-80d89173069a/f3e7cacd-20d3-4dbe-89b0-80d89173069a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.843125] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0b34373-9875-4818-8e03-6cff10285f4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.850050] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 846.850050] env[68674]: value = "task-3240155" [ 846.850050] env[68674]: _type = "Task" [ 846.850050] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.859911] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.927633] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9a4e5e5-acb4-43b5-8bf7-e220ae7787af tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.189s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.935208] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.101729] env[68674]: DEBUG nova.network.neutron [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Updating instance_info_cache with network_info: [{"id": "d1611897-c41e-41b8-a60f-5c6f36a7028f", "address": "fa:16:3e:86:ba:e8", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1611897-c4", "ovs_interfaceid": "d1611897-c41e-41b8-a60f-5c6f36a7028f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.210836] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d287e0de-2631-4184-bc6b-798f815f224c tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.160s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.281395] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240154, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.311741] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "2ed83aff-9a73-464b-914a-479d91cdfce0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.311994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.312275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "2ed83aff-9a73-464b-914a-479d91cdfce0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.312473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.312645] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.314755] env[68674]: INFO nova.compute.manager [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Terminating instance [ 847.363020] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476657} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.363123] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f3e7cacd-20d3-4dbe-89b0-80d89173069a/f3e7cacd-20d3-4dbe-89b0-80d89173069a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.363616] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.363616] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bd81227-b717-4a01-8cce-738f64d9cd58 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.371204] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 847.371204] env[68674]: value = "task-3240156" [ 847.371204] env[68674]: _type = "Task" [ 847.371204] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.379009] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240156, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.430793] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.606589] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "refresh_cache-3a0a7950-af31-4a20-a19d-44fbce8735a2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.606895] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Instance network_info: |[{"id": "d1611897-c41e-41b8-a60f-5c6f36a7028f", "address": "fa:16:3e:86:ba:e8", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1611897-c4", "ovs_interfaceid": "d1611897-c41e-41b8-a60f-5c6f36a7028f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 847.607385] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] Acquired lock "refresh_cache-3a0a7950-af31-4a20-a19d-44fbce8735a2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.607575] env[68674]: DEBUG nova.network.neutron [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Refreshing network info cache for port d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.608715] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:ba:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48512b02-ad5c-4105-ba7d-fd4775acf8e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1611897-c41e-41b8-a60f-5c6f36a7028f', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.616091] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 
tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.620745] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.621382] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fca6f47-9cbf-4294-8c3a-28e14c7c640e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.644413] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.644413] env[68674]: value = "task-3240157" [ 847.644413] env[68674]: _type = "Task" [ 847.644413] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.656308] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240157, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.691217] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96eb824-3282-4988-8d0d-c03bb91e694d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.698548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acee7b81-f4a7-45e5-8c9d-7f48318379d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.734360] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92103219-7bb9-4c3d-9cbe-36bb521dcde8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.745618] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9587d2ce-1ecf-4f03-b47a-de1ce56ca94b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.761149] env[68674]: DEBUG nova.compute.provider_tree [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.782178] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240154, 'name': ReconfigVM_Task, 'duration_secs': 1.109014} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.782476] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.783218] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62fbb4b6-dfc6-46e9-bf37-1afb54b83fae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.789375] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 847.789375] env[68674]: value = "task-3240158" [ 847.789375] env[68674]: _type = "Task" [ 847.789375] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.799651] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240158, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.818995] env[68674]: DEBUG nova.compute.manager [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 847.819384] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.820166] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c689fc6e-6022-418d-be55-dd1bfe50f775 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.827798] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.828155] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a38c90a-9010-4a1a-bcb2-e68be5182427 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.834180] env[68674]: DEBUG oslo_vmware.api [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 847.834180] env[68674]: value = "task-3240159" [ 847.834180] env[68674]: _type = "Task" [ 847.834180] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.842206] env[68674]: DEBUG oslo_vmware.api [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.882891] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240156, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069607} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.883238] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.884067] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7022399-e8f8-440a-b710-13ee4f7b2ad4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.906954] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] f3e7cacd-20d3-4dbe-89b0-80d89173069a/f3e7cacd-20d3-4dbe-89b0-80d89173069a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.907785] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2025bb1b-8596-4e0b-ba21-0e8a77c560de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.924049] env[68674]: DEBUG nova.network.neutron [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Updated VIF entry in instance network info cache for port d1611897-c41e-41b8-a60f-5c6f36a7028f. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.924912] env[68674]: DEBUG nova.network.neutron [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Updating instance_info_cache with network_info: [{"id": "d1611897-c41e-41b8-a60f-5c6f36a7028f", "address": "fa:16:3e:86:ba:e8", "network": {"id": "f8839054-ed22-4f0f-abbf-f8ae7ca5f59d", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-782448124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38a52a0d746a4f16b5e4d5a6c984cc45", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1611897-c4", "ovs_interfaceid": "d1611897-c41e-41b8-a60f-5c6f36a7028f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.927986] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 847.927986] env[68674]: value = "task-3240160" [ 847.927986] env[68674]: _type = "Task" [ 847.927986] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.938581] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.953133] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.153905] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240157, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.241434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "6803af03-b1d5-47e6-9471-5213469e4103" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.241736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "6803af03-b1d5-47e6-9471-5213469e4103" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.241948] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "6803af03-b1d5-47e6-9471-5213469e4103-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.242220] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "6803af03-b1d5-47e6-9471-5213469e4103-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.242420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "6803af03-b1d5-47e6-9471-5213469e4103-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.244757] env[68674]: INFO nova.compute.manager [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Terminating instance [ 848.266795] env[68674]: DEBUG nova.scheduler.client.report [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.299617] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc 
tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240158, 'name': Rename_Task, 'duration_secs': 0.244048} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.299896] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.300176] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a85917d4-b117-43fc-b30c-3ce16e7108d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.308708] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 848.308708] env[68674]: value = "task-3240161" [ 848.308708] env[68674]: _type = "Task" [ 848.308708] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.316627] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.343325] env[68674]: DEBUG oslo_vmware.api [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240159, 'name': PowerOffVM_Task, 'duration_secs': 0.264616} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.343587] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 848.343760] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 848.344017] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27195227-fe26-4dde-aa8b-8a516b143d69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.415464] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 848.415688] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 848.415872] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Deleting the datastore file [datastore2] 2ed83aff-9a73-464b-914a-479d91cdfce0 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 848.416157] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e89640ed-0471-4624-916f-a996db2d9439 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.422841] env[68674]: DEBUG oslo_vmware.api [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for the task: (returnval){ [ 848.422841] env[68674]: value = "task-3240163" [ 848.422841] env[68674]: _type = "Task" [ 848.422841] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.427537] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d0c662c-a6a1-4d57-8129-7138cd37dc3c req-2f285c5d-46cd-49b7-a450-8a953c1cd688 service nova] Releasing lock "refresh_cache-3a0a7950-af31-4a20-a19d-44fbce8735a2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.431506] env[68674]: DEBUG oslo_vmware.api [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.439637] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240160, 'name': ReconfigVM_Task, 'duration_secs': 0.378619} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.439923] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Reconfigured VM instance instance-00000042 to attach disk [datastore1] f3e7cacd-20d3-4dbe-89b0-80d89173069a/f3e7cacd-20d3-4dbe-89b0-80d89173069a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.440617] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f61db06-a8ca-4a4e-a7b9-7906e875d7cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.448025] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 848.448025] env[68674]: value = "task-3240164" [ 848.448025] env[68674]: _type = "Task" [ 848.448025] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.458150] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240164, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.654151] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240157, 'name': CreateVM_Task, 'duration_secs': 0.514167} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.654396] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.655209] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.655389] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.655724] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 848.655987] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce35fcaa-d3bd-4017-847e-491e70da3bfa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.660407] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 848.660407] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5241d352-3893-901e-773c-5983f8fa7b76" [ 848.660407] env[68674]: _type = "Task" [ 848.660407] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.668425] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5241d352-3893-901e-773c-5983f8fa7b76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.748763] env[68674]: DEBUG nova.compute.manager [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.749148] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.749952] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de287f16-650f-46a5-a613-cb7b1402524f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.758253] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.758501] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fecb08a-097d-4ec4-a0b9-d66f739e9378 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.764465] env[68674]: DEBUG oslo_vmware.api [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 848.764465] env[68674]: value = "task-3240165" [ 848.764465] env[68674]: _type = "Task" [ 848.764465] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.774914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.778463] env[68674]: DEBUG oslo_vmware.api [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240165, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.779215] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.103s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.780778] env[68674]: INFO nova.compute.claims [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.803443] env[68674]: INFO nova.scheduler.client.report [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Deleted allocations for instance 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f [ 848.819332] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240161, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.933767] env[68674]: DEBUG oslo_vmware.api [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Task: {'id': task-3240163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245327} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.934054] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.934246] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.934465] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.934678] env[68674]: INFO nova.compute.manager [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Took 1.12 seconds to destroy the instance on the hypervisor. 
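The records above trace the VMware-backed destroy path for instance 2ed83aff-9a73-464b-914a-479d91cdfce0: the guest is powered off, the VM is unregistered from vCenter, and its datastore directory is deleted before network deallocation begins. Below is a minimal sketch of that call sequence using the public oslo.vmware session API; the connection parameters, the vm_ref/dc_ref handles, and the helper name destroy_backing are illustrative assumptions, not Nova's actual code (Nova wraps these calls inside nova.virt.vmwareapi.vmops and vm_util).

```python
# Sketch only: the destroy sequence as it appears in the log, expressed with
# oslo.vmware primitives. Host, credentials, vm_ref, dc_ref and ds_path are placeholders.
from oslo_vmware import api, exceptions


def destroy_backing(session, vm_ref, dc_ref, ds_path):
    """Power off, unregister and remove a VM's files (hypothetical helper)."""
    try:
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)        # logged as "PowerOffVM_Task ... completed successfully"
    except exceptions.VimException:
        pass                               # VM may already be powered off; continue the teardown

    # Not a task: removes the VM from vCenter inventory ("Unregistering the VM").
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory ("Deleting the datastore file [datastore2] <uuid>").
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)            # "Deleted the datastore file"


session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
# destroy_backing(session, vm_ref, dc_ref, '[datastore2] 2ed83aff-9a73-464b-914a-479d91cdfce0')
```

Only after this local teardown does the manager move on to "Deallocating network for instance", which is why the Neutron deallocation lines follow the datastore delete in the log.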
[ 848.934950] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.935164] env[68674]: DEBUG nova.compute.manager [-] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.935274] env[68674]: DEBUG nova.network.neutron [-] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.957362] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240164, 'name': Rename_Task, 'duration_secs': 0.15983} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.958354] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.958354] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0342e221-dd22-4623-81ed-aba1bcb06458 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.964055] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 848.964055] env[68674]: value = "task-3240166" [ 848.964055] env[68674]: _type = "Task" [ 848.964055] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.972520] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240166, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.176269] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5241d352-3893-901e-773c-5983f8fa7b76, 'name': SearchDatastore_Task, 'duration_secs': 0.01047} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.176269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.176269] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.176269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.176269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.176269] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.176269] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd38cc34-81b0-421e-a1f3-e887dd5890c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.188972] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.188972] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.188972] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dfe7bd9-3f4e-4ef6-b760-92b4e37402da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.196083] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 849.196083] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52724297-ec04-f353-01a4-9c61431db36f" [ 849.196083] env[68674]: _type = "Task" [ 849.196083] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.204854] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52724297-ec04-f353-01a4-9c61431db36f, 'name': SearchDatastore_Task, 'duration_secs': 0.008332} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.206323] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd39fa4-07c1-45cc-8924-36a9fec21450 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.211975] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 849.211975] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a0588b-c0e9-6429-a5a8-eda4c7819108" [ 849.211975] env[68674]: _type = "Task" [ 849.211975] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.214382] env[68674]: DEBUG nova.compute.manager [req-d22a06f5-70c0-4228-b857-0ad860a5047d req-e5c310a7-f6a1-4845-a157-fb290a3bb386 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Received event network-vif-deleted-e952a804-62fd-4a82-b6de-457e739b719a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.214616] env[68674]: INFO nova.compute.manager [req-d22a06f5-70c0-4228-b857-0ad860a5047d req-e5c310a7-f6a1-4845-a157-fb290a3bb386 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Neutron deleted interface e952a804-62fd-4a82-b6de-457e739b719a; detaching it from the instance and deleting it from the info cache [ 849.214833] env[68674]: DEBUG nova.network.neutron [req-d22a06f5-70c0-4228-b857-0ad860a5047d req-e5c310a7-f6a1-4845-a157-fb290a3bb386 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.225497] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a0588b-c0e9-6429-a5a8-eda4c7819108, 'name': SearchDatastore_Task, 'duration_secs': 0.008665} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.225927] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.226603] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3a0a7950-af31-4a20-a19d-44fbce8735a2/3a0a7950-af31-4a20-a19d-44fbce8735a2.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.226603] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e86530c-895c-4aba-9dfc-8086920ae559 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.232702] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 849.232702] env[68674]: value = "task-3240167" [ 849.232702] env[68674]: _type = "Task" [ 849.232702] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.242714] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240167, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.278097] env[68674]: DEBUG oslo_vmware.api [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240165, 'name': PowerOffVM_Task, 'duration_secs': 0.198833} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.278376] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.278546] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.278797] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe258929-4a3b-4ee1-b873-81985fc26362 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.311835] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44b5229e-c2c2-4d19-9633-371685be3add tempest-ServersListShow296Test-1809466911 tempest-ServersListShow296Test-1809466911-project-member] Lock "1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.446s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.320944] env[68674]: DEBUG oslo_vmware.api [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240161, 'name': PowerOnVM_Task, 'duration_secs': 0.550104} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.321247] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.321458] env[68674]: INFO nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Took 8.95 seconds to spawn the instance on the hypervisor. 
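The Acquiring/Acquired/Releasing lock "[datastore2] devstack-image-cache_base/..." lines above come from oslo.concurrency's lock logging: the cached-image path doubles as a lock name, so only one request at a time searches, creates, or copies a given image in the datastore cache before cloning it into the instance folder with a CopyVirtualDisk_Task. The sketch below shows roughly that shape under stated assumptions; copy_from_image_cache, cache_vmdk, instance_vmdk and dc_ref are illustrative names, and the exact CopyVirtualDisk_Task keyword set may differ from what the driver passes.

```python
# Sketch only: serialize work on one cached image, then clone it for the instance,
# mirroring the SearchDatastore_Task / CopyVirtualDisk_Task records in the log.
from oslo_concurrency import lockutils


def copy_from_image_cache(session, dc_ref, cache_vmdk, instance_vmdk):
    # e.g. "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk"
    lock_name = cache_vmdk
    with lockutils.lock(lock_name):        # produces the Acquiring/Acquired/Releasing DEBUG lines
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=cache_vmdk, sourceDatacenter=dc_ref,
                                  destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)        # "Copied Virtual Disk ... completed successfully"
```

Once the copy finishes, the log shows the remaining spawn steps for the new disk: ExtendVirtualDisk_Task, ReconfigVM_Task to attach it, Rename_Task, and finally PowerOnVM_Task.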
[ 849.321636] env[68674]: DEBUG nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.322522] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3524ce0-7cea-42d8-9c7f-9e35634d7dbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.347103] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.347368] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.347567] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Deleting the datastore file [datastore2] 6803af03-b1d5-47e6-9471-5213469e4103 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.347834] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d6cbc65-0d0d-490b-96b1-095a82f206b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.354162] env[68674]: DEBUG oslo_vmware.api [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for the task: (returnval){ [ 849.354162] env[68674]: value = "task-3240169" [ 849.354162] env[68674]: _type = "Task" [ 849.354162] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.362412] env[68674]: DEBUG oslo_vmware.api [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.479907] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240166, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.697136] env[68674]: DEBUG nova.network.neutron [-] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.721509] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d7dcb95-c490-4ca9-b75b-0dd5091e5a75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.730585] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4803c1e7-8317-4320-a4d0-81c276bdfa37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.751795] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453631} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.752434] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 3a0a7950-af31-4a20-a19d-44fbce8735a2/3a0a7950-af31-4a20-a19d-44fbce8735a2.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.752808] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.753183] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dac4c6a7-787b-4cba-ae74-2184528c75ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.778036] env[68674]: DEBUG nova.compute.manager [req-d22a06f5-70c0-4228-b857-0ad860a5047d req-e5c310a7-f6a1-4845-a157-fb290a3bb386 service nova] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Detach interface failed, port_id=e952a804-62fd-4a82-b6de-457e739b719a, reason: Instance 2ed83aff-9a73-464b-914a-479d91cdfce0 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 849.779428] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 849.779428] env[68674]: value = "task-3240170" [ 849.779428] env[68674]: _type = "Task" [ 849.779428] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.786933] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.840764] env[68674]: INFO nova.compute.manager [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Took 47.07 seconds to build instance. [ 849.864818] env[68674]: DEBUG oslo_vmware.api [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Task: {'id': task-3240169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416269} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.868358] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.868564] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.868749] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.868919] env[68674]: INFO nova.compute.manager [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Took 1.12 seconds to destroy the instance on the hypervisor. [ 849.869180] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.869821] env[68674]: DEBUG nova.compute.manager [-] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 849.869955] env[68674]: DEBUG nova.network.neutron [-] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.975194] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240166, 'name': PowerOnVM_Task, 'duration_secs': 0.6322} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.975487] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.975692] env[68674]: INFO nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Took 7.04 seconds to spawn the instance on the hypervisor. [ 849.975869] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.976634] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703f4b05-94fe-406f-9207-aef0af264c23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.199331] env[68674]: INFO nova.compute.manager [-] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Took 1.26 seconds to deallocate network for instance. [ 850.292194] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075825} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.292709] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.296046] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed249aa-eabc-48e1-bdb4-c1a2abc9b968 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.319391] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 3a0a7950-af31-4a20-a19d-44fbce8735a2/3a0a7950-af31-4a20-a19d-44fbce8735a2.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.321488] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44991642-aab3-43a5-9ac4-aebb450aaf94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.336020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c495f0c-836d-4da7-92b7-0b36fd78efbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.345405] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1194c1b-ee19-4f4d-abf9-12919ccc03cc tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.589s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.347810] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fb65f5-5fc3-40ce-b0d6-70cf92fd3875 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.351178] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 850.351178] env[68674]: value = "task-3240171" [ 850.351178] env[68674]: _type = "Task" [ 850.351178] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.384246] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6736cd75-6141-4f24-8d8c-f82d91e3644a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.390079] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.395058] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48220d59-7c76-49ea-a541-73590693dc45 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.410217] env[68674]: DEBUG nova.compute.provider_tree [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.496834] env[68674]: INFO nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Took 42.19 seconds to build instance. [ 850.709205] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.848136] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 850.866016] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240171, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.913479] env[68674]: DEBUG nova.scheduler.client.report [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.999971] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.699s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.243500] env[68674]: DEBUG nova.compute.manager [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Received event network-vif-deleted-6670eed2-509b-430f-a00f-46293f18bba9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.243709] env[68674]: INFO nova.compute.manager [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Neutron deleted interface 6670eed2-509b-430f-a00f-46293f18bba9; detaching it from the instance and deleting it from the info cache [ 851.243887] env[68674]: DEBUG nova.network.neutron [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.308821] env[68674]: DEBUG nova.network.neutron [-] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.366499] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240171, 'name': ReconfigVM_Task, 'duration_secs': 0.971741} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.366786] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 3a0a7950-af31-4a20-a19d-44fbce8735a2/3a0a7950-af31-4a20-a19d-44fbce8735a2.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.367430] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43111693-8383-4e9f-a60d-d3d0e7f585d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.370361] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.375784] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 851.375784] env[68674]: value = "task-3240172" [ 851.375784] env[68674]: _type = "Task" [ 851.375784] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.384542] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240172, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.418757] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.419353] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 851.422732] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.738s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.423079] env[68674]: DEBUG nova.objects.instance [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lazy-loading 'resources' on Instance uuid 02d4aee3-7267-4658-a277-8a9a00dd9f6e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.501680] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 851.747247] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76750dc7-2c9b-4bb8-b0d5-7a64529ef4ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.757167] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d8cd8a-c565-40ee-a98c-27ec22b11135 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.792812] env[68674]: DEBUG nova.compute.manager [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Detach interface failed, port_id=6670eed2-509b-430f-a00f-46293f18bba9, reason: Instance 6803af03-b1d5-47e6-9471-5213469e4103 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 851.793053] env[68674]: DEBUG nova.compute.manager [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.793239] env[68674]: DEBUG nova.compute.manager [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing instance network info cache due to event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 851.793459] env[68674]: DEBUG oslo_concurrency.lockutils [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.793599] env[68674]: DEBUG oslo_concurrency.lockutils [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.793760] env[68674]: DEBUG nova.network.neutron [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.810919] env[68674]: INFO nova.compute.manager [-] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Took 1.94 seconds to deallocate network for instance. [ 851.886421] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240172, 'name': Rename_Task, 'duration_secs': 0.177002} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.886708] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.886940] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2feff237-545d-4921-9179-e24e72520c03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.892692] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 851.892692] env[68674]: value = "task-3240173" [ 851.892692] env[68674]: _type = "Task" [ 851.892692] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.905920] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240173, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.930941] env[68674]: DEBUG nova.compute.utils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 851.933577] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 851.933577] env[68674]: DEBUG nova.network.neutron [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 851.973039] env[68674]: DEBUG nova.policy [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1891413e35f845a2b761f474df3eb6c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dceab4b22c34737bc85ee5a5ded00d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 852.026978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.313284] env[68674]: DEBUG nova.network.neutron [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Successfully created port: 7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.319406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.407103] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240173, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.434099] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47143270-5586-4aac-8daf-0ce0dcc10a86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.437979] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 852.447492] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7278747-2ed2-4aa4-b8fc-5001f99d895d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.499678] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f218007d-1fb6-4818-b0fc-b554dc2bd922 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.509518] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8234227b-1016-4b72-b063-2241fe9ce29c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.524820] env[68674]: DEBUG nova.compute.provider_tree [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.662715] env[68674]: DEBUG nova.network.neutron [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updated VIF entry in instance network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.662715] env[68674]: DEBUG nova.network.neutron [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": "2cf52206-a0c1-4b57-886d-23df69181f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.903877] env[68674]: DEBUG oslo_vmware.api [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240173, 'name': PowerOnVM_Task, 'duration_secs': 0.603138} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.904165] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.904372] env[68674]: INFO nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Took 7.37 seconds to spawn the instance on the hypervisor. 
[ 852.904696] env[68674]: DEBUG nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.905947] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2124e1-8250-4a06-80de-84ac40157929 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.028993] env[68674]: DEBUG nova.scheduler.client.report [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.166093] env[68674]: DEBUG oslo_concurrency.lockutils [req-e902fbda-e323-43be-adcb-82b10efe47fd req-a4e477e1-1205-4fba-a30e-370125debb17 service nova] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.422779] env[68674]: INFO nova.compute.manager [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Took 43.81 seconds to build instance. [ 853.448588] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 853.476207] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a40aaf9f832caa82c455760d4f8ec1b5',container_format='bare',created_at=2025-04-03T08:11:01Z,direct_url=,disk_format='vmdk',id=bd367444-bce1-48c0-91d9-30bd5d973e39,min_disk=1,min_ram=0,name='tempest-test-snap-2041477030',owner='3dceab4b22c34737bc85ee5a5ded00d3',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-04-03T08:11:16Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 853.476491] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.476639] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.476820] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.476963] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.477193] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 853.477360] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 853.477482] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 853.477649] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Got 1 possible topologies 
{{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 853.477801] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 853.477967] env[68674]: DEBUG nova.virt.hardware [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 853.478861] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30a117e-fca5-4733-aa86-ece4d274bc47 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.486670] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869d143d-8021-4936-b572-ee0b2e13375a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.534174] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.111s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.537421] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 35.712s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.561490] env[68674]: INFO nova.scheduler.client.report [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted allocations for instance 02d4aee3-7267-4658-a277-8a9a00dd9f6e [ 853.836852] env[68674]: DEBUG nova.compute.manager [req-e20c3525-cf09-480a-8d9c-2a9def2ea9ab req-9b6b310d-f739-4d17-8f54-d387f4f7f6c8 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Received event network-vif-plugged-7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 853.837101] env[68674]: DEBUG oslo_concurrency.lockutils [req-e20c3525-cf09-480a-8d9c-2a9def2ea9ab req-9b6b310d-f739-4d17-8f54-d387f4f7f6c8 service nova] Acquiring lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.837313] env[68674]: DEBUG oslo_concurrency.lockutils [req-e20c3525-cf09-480a-8d9c-2a9def2ea9ab req-9b6b310d-f739-4d17-8f54-d387f4f7f6c8 service nova] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.837487] env[68674]: DEBUG oslo_concurrency.lockutils [req-e20c3525-cf09-480a-8d9c-2a9def2ea9ab req-9b6b310d-f739-4d17-8f54-d387f4f7f6c8 service nova] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.837654] env[68674]: DEBUG nova.compute.manager [req-e20c3525-cf09-480a-8d9c-2a9def2ea9ab req-9b6b310d-f739-4d17-8f54-d387f4f7f6c8 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] No waiting events found dispatching network-vif-plugged-7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 853.837824] env[68674]: WARNING nova.compute.manager [req-e20c3525-cf09-480a-8d9c-2a9def2ea9ab req-9b6b310d-f739-4d17-8f54-d387f4f7f6c8 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Received unexpected event network-vif-plugged-7e110d01-86a3-4380-aee9-f6baa501e7bf for instance with vm_state building and task_state spawning. [ 853.924830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-294df790-336e-441b-963e-1678013a07db tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.583s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.943016] env[68674]: DEBUG nova.network.neutron [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Successfully updated port: 7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.043857] env[68674]: INFO nova.compute.claims [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.071338] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04aa8ce3-175b-4d0a-8a2b-73b4712730b4 tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "02d4aee3-7267-4658-a277-8a9a00dd9f6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.058s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.449348] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "refresh_cache-95386cdb-c2e4-476a-8aaf-e10fdc40b591" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.449553] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "refresh_cache-95386cdb-c2e4-476a-8aaf-e10fdc40b591" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.449900] env[68674]: DEBUG nova.network.neutron [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.549849] env[68674]: INFO nova.compute.resource_tracker [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating resource usage from migration c843256d-6ec8-4075-9fbc-e7988cea5eb5 [ 854.679172] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.679315] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.679540] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.679722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.679887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.686425] env[68674]: INFO nova.compute.manager [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Terminating instance [ 854.798853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 
tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "3a0a7950-af31-4a20-a19d-44fbce8735a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.798853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.798853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "3a0a7950-af31-4a20-a19d-44fbce8735a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.798853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.798853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.799607] env[68674]: INFO nova.compute.manager [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Terminating instance [ 854.853649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.853892] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.854109] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock 
"baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.854290] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.854453] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.856725] env[68674]: INFO nova.compute.manager [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Terminating instance [ 854.980621] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d10f367-e68a-441e-aa9c-e66bbabfafa8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.988496] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc95ef7-0109-401c-91c9-f80004de84c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.992049] env[68674]: DEBUG nova.network.neutron [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.022493] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0bf9d0-f46f-45f7-878e-f31f209849a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.029810] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ab4860-2bbf-42fc-bdf7-a71ebe917a9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.043113] env[68674]: DEBUG nova.compute.provider_tree [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.161161] env[68674]: DEBUG nova.network.neutron [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Updating instance_info_cache with network_info: [{"id": "7e110d01-86a3-4380-aee9-f6baa501e7bf", "address": "fa:16:3e:2b:fa:26", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e110d01-86", "ovs_interfaceid": "7e110d01-86a3-4380-aee9-f6baa501e7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.189929] env[68674]: DEBUG nova.compute.manager [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.190208] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.191319] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98de2d4-7c17-4a3f-8dc5-9b25eac71f2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.199164] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.199439] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5737a2fc-c4d8-4faf-b9be-a8e50d281a48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.205766] env[68674]: DEBUG oslo_vmware.api [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 855.205766] env[68674]: value = "task-3240174" [ 855.205766] env[68674]: _type = "Task" [ 855.205766] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.215211] env[68674]: DEBUG oslo_vmware.api [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.304516] env[68674]: DEBUG nova.compute.manager [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.304516] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.305362] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1aa496-80b3-45d9-943e-c3fbd5783ab1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.313659] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.313953] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38cbbbd6-eb76-4928-b57d-edd99fc5a91a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.320270] env[68674]: DEBUG oslo_vmware.api [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 855.320270] env[68674]: value = "task-3240175" [ 855.320270] env[68674]: _type = "Task" [ 855.320270] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.328627] env[68674]: DEBUG oslo_vmware.api [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.361926] env[68674]: DEBUG nova.compute.manager [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.362271] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.363194] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2ec768-ed9c-43f5-ab05-00d24c72c9b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.370836] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.371072] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f0e6b7c-2732-4247-b642-d20eec672346 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.376266] env[68674]: DEBUG oslo_vmware.api [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 855.376266] env[68674]: value = "task-3240176" [ 855.376266] env[68674]: _type = "Task" [ 855.376266] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.384274] env[68674]: DEBUG oslo_vmware.api [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.547054] env[68674]: DEBUG nova.scheduler.client.report [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.664189] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "refresh_cache-95386cdb-c2e4-476a-8aaf-e10fdc40b591" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.664644] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Instance network_info: |[{"id": "7e110d01-86a3-4380-aee9-f6baa501e7bf", "address": "fa:16:3e:2b:fa:26", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e110d01-86", "ovs_interfaceid": "7e110d01-86a3-4380-aee9-f6baa501e7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 855.665193] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:fa:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e110d01-86a3-4380-aee9-f6baa501e7bf', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 855.673743] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 
tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.673996] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 855.674368] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-806e52ee-9e7c-4288-9759-4ab7bcc9289b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.699891] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 855.699891] env[68674]: value = "task-3240177" [ 855.699891] env[68674]: _type = "Task" [ 855.699891] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.707945] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240177, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.715237] env[68674]: DEBUG oslo_vmware.api [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240174, 'name': PowerOffVM_Task, 'duration_secs': 0.395667} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.715533] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.715716] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.716110] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e28e6c7-279f-47b6-8cd2-fcc1864686be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.785573] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.785883] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.786206] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 
tempest-MultipleCreateTestJSON-1685112952-project-member] Deleting the datastore file [datastore1] f3e7cacd-20d3-4dbe-89b0-80d89173069a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.786557] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb782852-4f76-495d-83c0-b6ea5f94cb51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.793957] env[68674]: DEBUG oslo_vmware.api [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 855.793957] env[68674]: value = "task-3240179" [ 855.793957] env[68674]: _type = "Task" [ 855.793957] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.802323] env[68674]: DEBUG oslo_vmware.api [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.830885] env[68674]: DEBUG oslo_vmware.api [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240175, 'name': PowerOffVM_Task, 'duration_secs': 0.286949} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.831214] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.831413] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.831719] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79308a1b-374e-43d7-9556-5b40f356377f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.887074] env[68674]: DEBUG oslo_vmware.api [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240176, 'name': PowerOffVM_Task, 'duration_secs': 0.244599} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.888621] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.888820] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.890221] env[68674]: DEBUG nova.compute.manager [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Received event network-changed-7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 855.890442] env[68674]: DEBUG nova.compute.manager [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Refreshing instance network info cache due to event network-changed-7e110d01-86a3-4380-aee9-f6baa501e7bf. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 855.890664] env[68674]: DEBUG oslo_concurrency.lockutils [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] Acquiring lock "refresh_cache-95386cdb-c2e4-476a-8aaf-e10fdc40b591" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.890831] env[68674]: DEBUG oslo_concurrency.lockutils [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] Acquired lock "refresh_cache-95386cdb-c2e4-476a-8aaf-e10fdc40b591" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.891019] env[68674]: DEBUG nova.network.neutron [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Refreshing network info cache for port 7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 855.896022] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b765eaf3-4e9c-4e35-91c7-e9fb4e3e11ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.896022] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.896251] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.896388] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleting the datastore file [datastore2] 3a0a7950-af31-4a20-a19d-44fbce8735a2 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.897066] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c86cc04f-fde9-410e-ac3d-3ae74fe7940c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.904718] env[68674]: DEBUG oslo_vmware.api [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for the task: (returnval){ [ 855.904718] env[68674]: value = "task-3240182" [ 855.904718] env[68674]: _type = "Task" [ 855.904718] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.914427] env[68674]: DEBUG oslo_vmware.api [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.968591] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.968849] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.968998] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleting the datastore file [datastore2] baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.969289] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9de3ad1e-329c-4288-8d10-088e5ceb50e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.976120] env[68674]: DEBUG oslo_vmware.api [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for the task: (returnval){ [ 855.976120] env[68674]: value = "task-3240183" [ 855.976120] env[68674]: _type = "Task" [ 855.976120] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.984122] env[68674]: DEBUG oslo_vmware.api [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240183, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.053505] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.517s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.053753] env[68674]: INFO nova.compute.manager [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Migrating [ 856.060789] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.525s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.061051] env[68674]: DEBUG nova.objects.instance [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lazy-loading 'resources' on Instance uuid 7329e503-d87d-4e15-b181-65ac6e376781 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.211534] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240177, 'name': CreateVM_Task, 'duration_secs': 0.346153} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.211718] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.212396] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.212560] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.212930] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 856.213206] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d2cb1eb-fe49-4c6f-82c7-e12e39cc7c18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.218640] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 856.218640] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cda506-e779-2219-fccc-5c641867e9b9" [ 856.218640] env[68674]: _type = "Task" [ 856.218640] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.226104] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cda506-e779-2219-fccc-5c641867e9b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.303839] env[68674]: DEBUG oslo_vmware.api [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141766} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.306386] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.306632] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.306854] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.307056] env[68674]: INFO nova.compute.manager [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 856.307309] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.308461] env[68674]: DEBUG nova.compute.manager [-] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.308461] env[68674]: DEBUG nova.network.neutron [-] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.413655] env[68674]: DEBUG oslo_vmware.api [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Task: {'id': task-3240182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152697} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.416087] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.416285] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.416468] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.416654] env[68674]: INFO nova.compute.manager [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 856.416952] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.417970] env[68674]: DEBUG nova.compute.manager [-] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.418094] env[68674]: DEBUG nova.network.neutron [-] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.493298] env[68674]: DEBUG oslo_vmware.api [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Task: {'id': task-3240183, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185219} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.493508] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.493691] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.493867] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.494055] env[68674]: INFO nova.compute.manager [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Took 1.13 seconds to destroy the instance on the hypervisor. [ 856.494313] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.494508] env[68674]: DEBUG nova.compute.manager [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.494601] env[68674]: DEBUG nova.network.neutron [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.547461] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc03e2b0-7cb0-451a-878c-561290c4c6f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.556609] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a848efa8-9dc3-4b26-8f3a-c89a71bb96a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.595850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.596118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.596341] env[68674]: DEBUG nova.network.neutron [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.602584] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126c773d-4ef5-4e1b-b3ef-b58c057f6f59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.613801] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e108b44c-4f31-45e8-a6f6-c544978f4f67 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.629143] env[68674]: DEBUG nova.compute.provider_tree [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 856.729407] env[68674]: 
DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.729681] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Processing image bd367444-bce1-48c0-91d9-30bd5d973e39 {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.729930] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.730091] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.730296] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.733797] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e691fcc-0566-4b01-8871-0f7262d090e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.744051] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.744051] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.744264] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e548e94-cdb7-414b-9a56-1db1dbb8444e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.750664] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 856.750664] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d366f-af17-1a05-3253-b054e6785384" [ 856.750664] env[68674]: _type = "Task" [ 856.750664] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.759260] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d366f-af17-1a05-3253-b054e6785384, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.914219] env[68674]: DEBUG nova.network.neutron [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Updated VIF entry in instance network info cache for port 7e110d01-86a3-4380-aee9-f6baa501e7bf. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.914587] env[68674]: DEBUG nova.network.neutron [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Updating instance_info_cache with network_info: [{"id": "7e110d01-86a3-4380-aee9-f6baa501e7bf", "address": "fa:16:3e:2b:fa:26", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e110d01-86", "ovs_interfaceid": "7e110d01-86a3-4380-aee9-f6baa501e7bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.990039] env[68674]: DEBUG nova.compute.manager [req-3512e678-4aca-4378-a0a7-c5d3cd05737e req-308aed44-a067-4a69-afa6-d5836b532f21 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Received event network-vif-deleted-d1611897-c41e-41b8-a60f-5c6f36a7028f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.990297] env[68674]: INFO nova.compute.manager [req-3512e678-4aca-4378-a0a7-c5d3cd05737e req-308aed44-a067-4a69-afa6-d5836b532f21 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Neutron deleted interface d1611897-c41e-41b8-a60f-5c6f36a7028f; detaching it from the instance and deleting it from the info cache [ 856.990440] env[68674]: DEBUG nova.network.neutron [req-3512e678-4aca-4378-a0a7-c5d3cd05737e req-308aed44-a067-4a69-afa6-d5836b532f21 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.108022] env[68674]: DEBUG nova.network.neutron [-] [instance: 
f3e7cacd-20d3-4dbe-89b0-80d89173069a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.156093] env[68674]: ERROR nova.scheduler.client.report [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [req-8a577cf0-3671-483f-83bb-3a4197d81002] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8a577cf0-3671-483f-83bb-3a4197d81002"}]} [ 857.173418] env[68674]: DEBUG nova.scheduler.client.report [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 857.195239] env[68674]: DEBUG nova.scheduler.client.report [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 857.195239] env[68674]: DEBUG nova.compute.provider_tree [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.210137] env[68674]: DEBUG nova.scheduler.client.report [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 857.234208] env[68674]: DEBUG nova.scheduler.client.report [None 
req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 857.264182] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 857.264314] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Fetch image to [datastore1] OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231/OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 857.264501] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Downloading stream optimized image bd367444-bce1-48c0-91d9-30bd5d973e39 to [datastore1] OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231/OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231.vmdk on the data store datastore1 as vApp {{(pid=68674) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 857.266583] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Downloading image file data bd367444-bce1-48c0-91d9-30bd5d973e39 to the ESX as VM named 'OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231' {{(pid=68674) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 857.354431] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 857.354431] env[68674]: value = "resgroup-9" [ 857.354431] env[68674]: _type = "ResourcePool" [ 857.354431] env[68674]: }. 
{{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 857.354767] env[68674]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-161dfd06-c50f-4792-8f5a-cb925dc452bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.372665] env[68674]: DEBUG nova.network.neutron [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [{"id": "9f0aa506-1438-47ac-871c-632df3f943bf", "address": "fa:16:3e:e9:fe:db", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0aa506-14", "ovs_interfaceid": "9f0aa506-1438-47ac-871c-632df3f943bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.382540] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease: (returnval){ [ 857.382540] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bf9275-22f8-3576-cfcd-50c9fd4cff87" [ 857.382540] env[68674]: _type = "HttpNfcLease" [ 857.382540] env[68674]: } obtained for vApp import into resource pool (val){ [ 857.382540] env[68674]: value = "resgroup-9" [ 857.382540] env[68674]: _type = "ResourcePool" [ 857.382540] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 857.382788] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the lease: (returnval){ [ 857.382788] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bf9275-22f8-3576-cfcd-50c9fd4cff87" [ 857.382788] env[68674]: _type = "HttpNfcLease" [ 857.382788] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 857.394762] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 857.394762] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bf9275-22f8-3576-cfcd-50c9fd4cff87" [ 857.394762] env[68674]: _type = "HttpNfcLease" [ 857.394762] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 857.417740] env[68674]: DEBUG oslo_concurrency.lockutils [req-ee213f86-f1b5-4ad9-adc9-b0242d9ae779 req-3adb03a4-df84-49ec-82c0-f720fd760266 service nova] Releasing lock "refresh_cache-95386cdb-c2e4-476a-8aaf-e10fdc40b591" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.477107] env[68674]: DEBUG nova.network.neutron [-] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.493101] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e50e868d-8160-4a7a-9158-130214a669b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.502296] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7459354-f687-4d75-9f0a-99865d1d1f10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.542443] env[68674]: DEBUG nova.network.neutron [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.543829] env[68674]: DEBUG nova.compute.manager [req-3512e678-4aca-4378-a0a7-c5d3cd05737e req-308aed44-a067-4a69-afa6-d5836b532f21 service nova] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Detach interface failed, port_id=d1611897-c41e-41b8-a60f-5c6f36a7028f, reason: Instance 3a0a7950-af31-4a20-a19d-44fbce8735a2 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 857.610564] env[68674]: INFO nova.compute.manager [-] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Took 1.30 seconds to deallocate network for instance. 
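Note: the 409 "placement.concurrent_update" error and the inventory refresh a few entries above show Placement's generation-based compare-and-swap: the writer sends the resource-provider generation it last saw, and a conflict means another writer bumped it first, so the client re-reads the provider and retries. A minimal sketch of that pattern follows; the endpoint, token, and helper name are illustrative and not taken from this log, and error handling is reduced to the retry itself.

    import requests

    PLACEMENT = "https://placement.example/placement"   # illustrative endpoint
    HEADERS = {"X-Auth-Token": "REDACTED"}               # illustrative auth header

    def set_inventory(rp_uuid, inventories, attempts=3):
        """PUT inventories, retrying when the provider generation conflicts (409)."""
        for _ in range(attempts):
            # Re-read the provider to learn its current generation.
            rp = requests.get("%s/resource_providers/%s" % (PLACEMENT, rp_uuid),
                              headers=HEADERS)
            rp.raise_for_status()
            body = {"resource_provider_generation": rp.json()["generation"],
                    "inventories": inventories}
            resp = requests.put("%s/resource_providers/%s/inventories"
                                % (PLACEMENT, rp_uuid),
                                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation; loop, refresh, and retry with the new value.
        raise RuntimeError("inventory update kept conflicting")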
[ 857.772509] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6983994a-4db8-402d-be00-4d341be4d313 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.780286] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eebc936-1008-4fed-bf38-63e16309a304 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.811415] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf75562-89e3-41e6-adeb-c8d8be242798 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.819100] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74afa8e2-02a2-45bb-a494-bb3ef12dc5d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.833224] env[68674]: DEBUG nova.compute.provider_tree [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.876977] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.890580] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 857.890580] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bf9275-22f8-3576-cfcd-50c9fd4cff87" [ 857.890580] env[68674]: _type = "HttpNfcLease" [ 857.890580] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 857.890817] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 857.890817] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bf9275-22f8-3576-cfcd-50c9fd4cff87" [ 857.890817] env[68674]: _type = "HttpNfcLease" [ 857.890817] env[68674]: }. 
{{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 857.891520] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b030211-589e-46c7-8e5f-91ebd05cdaad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.898939] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a3f63e-7b6c-ac9a-8e07-a9670fb540b6/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 857.899132] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a3f63e-7b6c-ac9a-8e07-a9670fb540b6/disk-0.vmdk. {{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 857.964522] env[68674]: DEBUG nova.compute.manager [req-ce44dc8a-1412-414e-b75e-f87e8c4e30cb req-f8006c8f-bed9-4436-9905-5689c0a71572 service nova] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Received event network-vif-deleted-9cbcb250-c330-4129-b949-513c43ad2f8d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 857.970563] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-379df2a8-5254-4166-91d9-1dda66fefc1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.983058] env[68674]: INFO nova.compute.manager [-] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Took 1.56 seconds to deallocate network for instance. [ 858.047529] env[68674]: INFO nova.compute.manager [-] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Took 1.55 seconds to deallocate network for instance. 
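Note: the recurring "Acquiring lock … / acquired … waited Ns / released … held Ns" pairs above (for "compute_resources", the image-cache path, and "refresh_cache-<uuid>") come from oslo.concurrency's named locks. A small self-contained sketch of the two usual forms, assuming only that oslo.concurrency is installed; the function bodies are illustrative, not Nova's code.

    from oslo_concurrency import lockutils

    # Decorator form: all callers of update_usage() serialize on the same named
    # lock, which is what produces the "acquired ... waited"/"released ... held"
    # pairs in the log.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        print("updating usage while holding compute_resources")

    # Context-manager form, as used for the per-instance cache locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            print("refreshing network info cache for", instance_uuid)

    if __name__ == "__main__":
        update_usage()
        refresh_cache("e1283f87-5bdb-4d4e-a1c5-f3b1c9180188")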
[ 858.117226] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.371105] env[68674]: DEBUG nova.scheduler.client.report [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 858.371400] env[68674]: DEBUG nova.compute.provider_tree [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 103 to 104 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 858.371587] env[68674]: DEBUG nova.compute.provider_tree [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 858.489919] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.554718] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.718547] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Completed reading data from the image iterator. 
{{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 858.718703] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a3f63e-7b6c-ac9a-8e07-a9670fb540b6/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 858.719594] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c733824-2d66-4cc8-9ee7-4e44ba273bec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.726970] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a3f63e-7b6c-ac9a-8e07-a9670fb540b6/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 858.727185] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a3f63e-7b6c-ac9a-8e07-a9670fb540b6/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 858.727437] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-33c23964-0716-4d05-8939-6e7c6112e8ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.879383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.818s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.882433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.486s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.884092] env[68674]: INFO nova.compute.claims [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.906835] env[68674]: INFO nova.scheduler.client.report [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Deleted allocations for instance 7329e503-d87d-4e15-b181-65ac6e376781 [ 858.965933] env[68674]: DEBUG oslo_vmware.rw_handles [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Closed VMDK write handle for 
https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a3f63e-7b6c-ac9a-8e07-a9670fb540b6/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 858.966196] env[68674]: INFO nova.virt.vmwareapi.images [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Downloaded image file data bd367444-bce1-48c0-91d9-30bd5d973e39 [ 858.967308] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5e3ece-2e73-4595-8fb2-8c3d70209963 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.982620] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dca126cf-018e-4e5d-9bc1-05645a795684 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.006262] env[68674]: INFO nova.virt.vmwareapi.images [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] The imported VM was unregistered [ 859.008484] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 859.009257] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating directory with path [datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.009257] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4c890ca-bebe-4d04-92fe-69fbfee864fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.021250] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created directory with path [datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.021330] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231/OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231.vmdk to [datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk. 
{{(pid=68674) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 859.021732] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b1a43ad7-8462-401d-b63b-82db6fe6dbfe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.029790] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 859.029790] env[68674]: value = "task-3240186" [ 859.029790] env[68674]: _type = "Task" [ 859.029790] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.038192] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240186, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.045604] env[68674]: DEBUG nova.compute.manager [req-295b8ad2-0bde-4c3e-9b72-f5b9a5a68a52 req-0a5c6ace-52f1-4163-a4f8-5d31d52b849f service nova] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Received event network-vif-deleted-a459b31b-865e-45d7-a62b-b7c95eb50c15 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.397660] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3dc84f-eed8-495e-8043-403bfcbf0875 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.419577] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 859.424354] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4b92239f-4cc2-4ccf-b8e1-d957b8fa9d7a tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "7329e503-d87d-4e15-b181-65ac6e376781" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.436s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.541498] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240186, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.926186] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.929491] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4ac314c-540b-48c8-ad1f-5f3202e7522a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.936364] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 859.936364] env[68674]: value = "task-3240187" [ 859.936364] env[68674]: _type = "Task" [ 859.936364] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.948106] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.043906] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240186, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.367068] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cb4676-3ffd-4a14-bdf7-3c6d59448419 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.373632] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad897800-86cc-429d-a9db-c6fa92ba3f22 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.405681] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e496bb4-b31f-4971-b117-d4283efbc995 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.413426] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7b3234-c65c-4f17-b85c-9bec333a3842 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.427962] env[68674]: DEBUG nova.compute.provider_tree [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.446435] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240187, 'name': PowerOffVM_Task, 'duration_secs': 0.194639} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.446716] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.446945] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 860.543440] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240186, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.931732] env[68674]: DEBUG nova.scheduler.client.report [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.953495] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 860.953818] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.954057] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 860.954315] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.954425] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 860.954595] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 860.954868] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 860.955035] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 860.955262] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 860.955443] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 860.955618] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 860.961902] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0a2eb80-2ab1-4422-82d4-d3694eb2bb28 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.980716] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 860.980716] env[68674]: value = "task-3240188" [ 860.980716] env[68674]: _type = "Task" [ 860.980716] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.990028] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240188, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.044559] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240186, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.191157] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.191364] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.191573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.191759] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.191932] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.194642] env[68674]: INFO nova.compute.manager [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Terminating instance [ 861.437017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.437584] env[68674]: DEBUG nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 861.440378] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.430s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.441877] env[68674]: INFO nova.compute.claims [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.491023] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240188, 'name': ReconfigVM_Task, 'duration_secs': 0.283501} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.491368] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 861.544502] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240186, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.352363} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.544694] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231/OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231.vmdk to [datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk. 
[ 861.544884] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Cleaning up location [datastore1] OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231 {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 861.545057] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d800f2f2-1c9d-4af4-803e-fa11a722f231 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.545300] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7728e054-aa33-468a-93a5-42057d1182d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.551805] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 861.551805] env[68674]: value = "task-3240189" [ 861.551805] env[68674]: _type = "Task" [ 861.551805] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.559391] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240189, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.702150] env[68674]: DEBUG nova.compute.manager [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 861.702596] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.703559] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e930ac2c-4e2a-4427-ad77-80bcbbf890ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.711464] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.711737] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85b79a02-dffb-412e-a758-d56712a578b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.718167] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 861.718167] env[68674]: value = "task-3240190" [ 861.718167] env[68674]: _type = "Task" [ 861.718167] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.727690] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240190, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.946172] env[68674]: DEBUG nova.compute.utils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 861.949545] env[68674]: DEBUG nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Not allocating networking since 'none' was specified. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 861.997747] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 861.997961] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.998131] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.998317] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.998467] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.998612] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 861.998816] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 861.998975] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 861.999547] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 861.999547] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 861.999547] env[68674]: DEBUG nova.virt.hardware [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 862.006025] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 862.006025] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89b79f73-efda-4294-8e74-56c175cd9823 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.024655] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 862.024655] env[68674]: value = "task-3240191" [ 862.024655] env[68674]: _type = "Task" [ 862.024655] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.032390] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.062153] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036344} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.062524] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.062782] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.063060] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk to [datastore1] 95386cdb-c2e4-476a-8aaf-e10fdc40b591/95386cdb-c2e4-476a-8aaf-e10fdc40b591.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.063340] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97b977ff-674c-4c06-a158-1f7e6cae8c7c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.069978] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 862.069978] env[68674]: value = "task-3240192" [ 862.069978] env[68674]: _type = "Task" [ 862.069978] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.077577] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.227752] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240190, 'name': PowerOffVM_Task, 'duration_secs': 0.214665} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.227752] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.227752] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.227910] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87f88255-e0f0-414d-968c-e9bcbc462aca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.308652] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.308949] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.309176] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Deleting the datastore file [datastore1] 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.309480] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ceaf3a5e-3a13-448d-a3e7-639e6cb0ab68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.316478] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for the task: (returnval){ [ 862.316478] env[68674]: value = "task-3240194" [ 862.316478] env[68674]: _type = "Task" [ 862.316478] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.328643] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.450724] env[68674]: DEBUG nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 862.535271] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240191, 'name': ReconfigVM_Task, 'duration_secs': 0.191816} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.538174] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 862.539312] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252904c9-4c16-4ce6-ba55-52ff82f40624 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.563715] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188/e1283f87-5bdb-4d4e-a1c5-f3b1c9180188.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.567053] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61f34109-522a-458e-92b1-6e41b2719f0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.589940] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240192, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.591464] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 862.591464] env[68674]: value = "task-3240195" [ 862.591464] env[68674]: _type = "Task" [ 862.591464] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.603178] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240195, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.828185] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.975427] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58034da-1194-4725-b1ce-46c4025e2d09 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.985021] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d789219b-c35d-47d0-ad40-2f08d5eeab37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.015386] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016874ac-4135-48e5-bb45-f1eb2cefc8ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.024887] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78a3c36-3264-494f-979e-5da8a64f1280 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.040135] env[68674]: DEBUG nova.compute.provider_tree [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.091119] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240192, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.106024] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240195, 'name': ReconfigVM_Task, 'duration_secs': 0.308243} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.106389] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Reconfigured VM instance instance-0000003b to attach disk [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188/e1283f87-5bdb-4d4e-a1c5-f3b1c9180188.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 863.106646] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 863.328450] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.465812] env[68674]: DEBUG nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 863.493706] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 863.494319] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.494584] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.494791] env[68674]: DEBUG nova.virt.hardware [None 
req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.494975] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.495179] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 863.495431] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 863.495633] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 863.495853] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 863.496068] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 863.496287] env[68674]: DEBUG nova.virt.hardware [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 863.497191] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840db2fe-47b4-4a18-bf8e-97212e9e70d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.505720] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4aa278-0a98-4054-946f-b14673bef5a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.519659] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.525601] env[68674]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Creating folder: Project (87d37e239fc34a918ab0aa76865fbee3). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.525970] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-177e40b5-058b-4183-b8e8-b70527973598 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.536818] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Created folder: Project (87d37e239fc34a918ab0aa76865fbee3) in parent group-v647377. [ 863.537031] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Creating folder: Instances. Parent ref: group-v647577. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.537285] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-961f0ccf-be5b-45ce-bab1-2c804a5b46c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.544239] env[68674]: DEBUG nova.scheduler.client.report [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.549299] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Created folder: Instances in parent group-v647577. [ 863.549506] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.549946] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.550177] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44cdf5b4-22d3-4952-917a-7bf5aaceb414 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.567679] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.567679] env[68674]: value = "task-3240198" [ 863.567679] env[68674]: _type = "Task" [ 863.567679] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.575858] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240198, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.590556] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240192, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.613697] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd1e1dc-3bc9-494f-8b68-0ba9c5415aab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.635543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c3b6cc-a8f0-4ee7-9f80-16ffbbff842c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.658586] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 863.829995] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.052154] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.052804] env[68674]: DEBUG nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 864.059589] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 37.772s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.078577] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240198, 'name': CreateVM_Task, 'duration_secs': 0.303033} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.079945] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.080551] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.080713] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.081156] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 864.081462] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0519cff-a4a5-43c6-a364-2f522b809e7f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.089521] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 864.089521] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d606c8-bac9-733d-289c-1305ba82e0ae" [ 864.089521] env[68674]: _type = "Task" [ 864.089521] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.096707] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240192, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.102780] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d606c8-bac9-733d-289c-1305ba82e0ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.207728] env[68674]: DEBUG nova.network.neutron [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Port 9f0aa506-1438-47ac-871c-632df3f943bf binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 864.328659] env[68674]: DEBUG oslo_vmware.api [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Task: {'id': task-3240194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.839913} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.329857] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.329857] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.329857] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.329857] env[68674]: INFO nova.compute.manager [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Took 2.63 seconds to destroy the instance on the hypervisor. [ 864.330054] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 864.330239] env[68674]: DEBUG nova.compute.manager [-] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 864.330343] env[68674]: DEBUG nova.network.neutron [-] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.560272] env[68674]: DEBUG nova.compute.utils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.574579] env[68674]: DEBUG nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 864.577481] env[68674]: DEBUG nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.590476] env[68674]: DEBUG nova.compute.manager [req-fce2575b-6198-4e4f-bd38-8698d477d403 req-52c6748c-cc84-4af7-8d14-4078fa5fb48b service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Received event network-vif-deleted-328800af-995b-4980-a8a3-4a51aa3c17e5 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 864.590632] env[68674]: INFO nova.compute.manager [req-fce2575b-6198-4e4f-bd38-8698d477d403 req-52c6748c-cc84-4af7-8d14-4078fa5fb48b service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Neutron deleted interface 328800af-995b-4980-a8a3-4a51aa3c17e5; detaching it from the instance and deleting it from the info cache [ 864.590802] env[68674]: DEBUG nova.network.neutron [req-fce2575b-6198-4e4f-bd38-8698d477d403 req-52c6748c-cc84-4af7-8d14-4078fa5fb48b service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.599366] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240192, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.404052} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.600758] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/bd367444-bce1-48c0-91d9-30bd5d973e39/bd367444-bce1-48c0-91d9-30bd5d973e39.vmdk to [datastore1] 95386cdb-c2e4-476a-8aaf-e10fdc40b591/95386cdb-c2e4-476a-8aaf-e10fdc40b591.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.601554] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b271922-8b2c-4903-ac2b-fa1b1d867ac3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.607819] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d606c8-bac9-733d-289c-1305ba82e0ae, 'name': SearchDatastore_Task, 'duration_secs': 0.020829} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.608359] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.608607] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.609920] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.609920] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.609920] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.609920] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4b60c2f-5d8e-478f-8c89-51ac388a0286 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.628731] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 95386cdb-c2e4-476a-8aaf-e10fdc40b591/95386cdb-c2e4-476a-8aaf-e10fdc40b591.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.629919] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e8c31d2-66af-4bdf-91b1-c6e35f80bfad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.645378] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.645548] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.646838] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15c97130-cd80-4045-a5e9-16e4d18fd4de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.651093] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 864.651093] env[68674]: value = "task-3240199" [ 864.651093] env[68674]: _type = "Task" [ 864.651093] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.652318] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 864.652318] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5260759c-b49d-e2e8-ec92-38464fb08b49" [ 864.652318] env[68674]: _type = "Task" [ 864.652318] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.664931] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240199, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.667578] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5260759c-b49d-e2e8-ec92-38464fb08b49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.076136] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Applying migration context for instance e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 as it has an incoming, in-progress migration c843256d-6ec8-4075-9fbc-e7988cea5eb5. Migration status is post-migrating {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 865.078937] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating resource usage from migration c843256d-6ec8-4075-9fbc-e7988cea5eb5 [ 865.079397] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating resource usage from migration dbcb4b5d-eb95-4807-bf85-9adfb2b1f475 [ 865.079601] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating resource usage from migration 8d2ad03f-6e50-4194-b3d7-a98f0bd46666 [ 865.083607] env[68674]: DEBUG nova.network.neutron [-] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.094153] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-765f83e4-3618-41d2-a929-7aa59126c715 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.105245] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579dc1f4-8e3f-41c0-b502-de1f1edb12bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.116522] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.116710] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance fa89e0b5-590d-43fb-bb11-02f8fdee0c2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.116827] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 23891bad-1b63-4237-9243-78954cf67d52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.116924] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.117176] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0eaf7d72-755b-4977-8f71-7d53ad1cf573 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.117290] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 6803af03-b1d5-47e6-9471-5213469e4103 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.117413] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5e3f667c-5d3a-4465-9186-779563087480 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.117529] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 77fa5a89-961b-4c84-a75e-a5be0253677e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.117671] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f029042f-d80b-453e-adc9-1e65d7da7aaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.117861] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5c12cb5d-821c-4e63-86a0-dadc9794a8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.117925] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f69c5fcf-6d25-48a5-a154-c3632c76175a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.118044] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance c4fd04a7-2b11-4c4b-84d1-53edc1e3f035 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.118175] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5bd42044-84f5-4f48-aa97-b7cf990ed35d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.118363] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance a4cb1632-eada-4b10-a66f-64fecf45fd76 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.118410] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.118538] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 2ed83aff-9a73-464b-914a-479d91cdfce0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.118652] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 63d6c185-db2c-4ede-a716-9a0dd432ab1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.118773] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f3e7cacd-20d3-4dbe-89b0-80d89173069a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.118893] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3a0a7950-af31-4a20-a19d-44fbce8735a2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.119014] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 95386cdb-c2e4-476a-8aaf-e10fdc40b591 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
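[editor's note] The resource tracker entries above walk every allocation Placement holds against this compute node and classify it: allocations for actively managed instances and in-progress migrations are kept, while unknown consumers trigger the "Skipping heal of allocation" warning. The snippet below is an illustrative paraphrase of that classification, not Nova's actual code; the data structures are simplified.

# Illustrative paraphrase of the allocation review seen in the log; not Nova's code.
import logging

LOG = logging.getLogger(__name__)

def review_allocations(allocations, tracked_instances, in_progress_migrations):
    """allocations: {consumer_uuid: {'resources': {...}}} as reported by Placement
    for this compute node."""
    for uuid, alloc in allocations.items():
        if uuid in in_progress_migrations:
            LOG.debug("Migration %s is active on this compute host and has "
                      "allocations in placement: %s.", uuid, alloc)
        elif uuid in tracked_instances:
            LOG.debug("Instance %s actively managed on this compute host and "
                      "has allocations in placement: %s.", uuid, alloc)
        else:
            LOG.warning("Instance %s is not being actively managed by this "
                        "compute host but has allocations referencing this "
                        "compute host: %s. Skipping heal of allocation.",
                        uuid, alloc)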
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.119169] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration c843256d-6ec8-4075-9fbc-e7988cea5eb5 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 865.119252] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.119352] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 040d7108-8da1-4914-b7fd-03cf09ec68aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.119460] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 867fd9ca-049f-441a-94bc-af60df598043 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.150730] env[68674]: DEBUG nova.compute.manager [req-fce2575b-6198-4e4f-bd38-8698d477d403 req-52c6748c-cc84-4af7-8d14-4078fa5fb48b service nova] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Detach interface failed, port_id=328800af-995b-4980-a8a3-4a51aa3c17e5, reason: Instance 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 865.168940] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240199, 'name': ReconfigVM_Task, 'duration_secs': 0.341305} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.169307] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5260759c-b49d-e2e8-ec92-38464fb08b49, 'name': SearchDatastore_Task, 'duration_secs': 0.028225} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.169629] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 95386cdb-c2e4-476a-8aaf-e10fdc40b591/95386cdb-c2e4-476a-8aaf-e10fdc40b591.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.170804] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31784291-c0a5-4661-86a0-f673184914d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.172342] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b860a7-ac98-4df8-9861-c15ff01f45c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.178386] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 865.178386] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c31541-151d-e32a-4134-463a0cf2830f" [ 865.178386] env[68674]: _type = "Task" [ 865.178386] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.179687] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 865.179687] env[68674]: value = "task-3240200" [ 865.179687] env[68674]: _type = "Task" [ 865.179687] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.191149] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c31541-151d-e32a-4134-463a0cf2830f, 'name': SearchDatastore_Task, 'duration_secs': 0.010584} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.194141] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.194403] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 040d7108-8da1-4914-b7fd-03cf09ec68aa/040d7108-8da1-4914-b7fd-03cf09ec68aa.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.194694] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240200, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.194890] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03fce835-b220-459d-9f64-275ffc300cb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.200657] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 865.200657] env[68674]: value = "task-3240201" [ 865.200657] env[68674]: _type = "Task" [ 865.200657] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.208815] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240201, 'name': CopyVirtualDisk_Task} progress is 0%. 
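[editor's note] The copy from the image cache (devstack-image-cache_base/<image>.vmdk) into the instance directory goes through the datacenter's VirtualDiskManager, as the CopyVirtualDisk_Task invocation above shows. A rough sketch of that step follows, assuming the standard CopyVirtualDisk_Task parameters; the datastore name, image id, instance uuid and datacenter reference are placeholders.

# Rough sketch of the cache-to-instance disk copy; paths and refs are placeholders.
def copy_cached_image(session, dc_ref, image_id, instance_uuid, datastore):
    src = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)
    dst = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    # Mirrors the CopyVirtualDisk_Task polling visible in the surrounding entries.
    session.wait_for_task(task)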
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.231842] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.232098] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.232295] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.590049] env[68674]: INFO nova.compute.manager [-] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Took 1.26 seconds to deallocate network for instance. [ 865.591234] env[68674]: DEBUG nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Start spawning the instance on the hypervisor. 
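[editor's note] The Acquiring/acquired/released triplets on the "<uuid>-events" lock above come from oslo.concurrency's in-process locks, which Nova uses to serialise access to per-instance event state. A small sketch of the same pattern, using lockutils.lock as a context manager; the function body is illustrative only.

# Sketch of the acquire/release pattern behind the "<uuid>-events" lock lines.
from oslo_concurrency import lockutils

def clear_events_for_instance(instance_uuid, pending_events):
    # Everything inside the with-block runs between the "acquired" and
    # "released" log lines for the "<uuid>-events" lock.
    with lockutils.lock('%s-events' % instance_uuid):
        pending_events.pop(instance_uuid, None)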
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.624530] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.624663] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.624825] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.625032] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.625541] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.625541] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.625664] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.625771] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.625933] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 
tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.626111] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.626284] env[68674]: DEBUG nova.virt.hardware [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.626990] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 036fbca7-be6a-43c6-972e-a71524833498 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 865.627147] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration dbcb4b5d-eb95-4807-bf85-9adfb2b1f475 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 865.627271] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.629057] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8fa96c-d9f5-4636-b25e-47ca31725265 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.638923] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495158e8-9a9d-4242-b9ba-f66d76934b12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.654396] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.660033] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
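[editor's note] The nova.virt.hardware lines above pick a guest CPU topology for the m1.nano flavor: with vcpus=1 and no flavor or image limits, the effective maximum per dimension is 65536 and exactly one topology (sockets=1, cores=1, threads=1) is possible. The snippet below is a toy reconstruction of that enumeration under the logged limits, not Nova's implementation.

# Toy reconstruction of the topology enumeration for vcpus=1; not Nova's code.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topologies.append(VirtCPUTopology(s, c, t))
    return topologies

# For the m1.nano flavor (vcpus=1) this yields exactly one topology, matching the
# "Got 1 possible topologies ... (cores=1,sockets=1,threads=1)" lines above.
assert possible_topologies(1) == [VirtCPUTopology(1, 1, 1)]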
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.660529] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.660748] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be921b84-99d8-4c36-a1c9-a9c71cb399a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.677618] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.677618] env[68674]: value = "task-3240202" [ 865.677618] env[68674]: _type = "Task" [ 865.677618] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.685277] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240202, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.693176] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240200, 'name': Rename_Task, 'duration_secs': 0.232797} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.693451] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.693687] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0263f326-738f-491f-ac24-66c60617272e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.700022] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 865.700022] env[68674]: value = "task-3240203" [ 865.700022] env[68674]: _type = "Task" [ 865.700022] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.712822] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44305} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.715573] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 040d7108-8da1-4914-b7fd-03cf09ec68aa/040d7108-8da1-4914-b7fd-03cf09ec68aa.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.715804] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.716080] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240203, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.716283] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b887cc4a-806c-48ad-96c9-1efa511db9e6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.724104] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 865.724104] env[68674]: value = "task-3240204" [ 865.724104] env[68674]: _type = "Task" [ 865.724104] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.732066] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240204, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.102420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.134216] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 7d953e59-53c1-4041-a641-35c12c012f7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 866.188420] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240202, 'name': CreateVM_Task, 'duration_secs': 0.392844} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.188519] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.189021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.189821] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.189821] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 866.189821] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81585fcd-c9cd-42f7-a2d1-c5da0ae360ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.195694] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 866.195694] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252204c-8536-2978-18dd-e7456097a198" [ 866.195694] env[68674]: _type = "Task" [ 866.195694] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.207440] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252204c-8536-2978-18dd-e7456097a198, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.213266] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240203, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.235213] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240204, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075861} completed successfully. 
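[editor's note] The "Extending root virtual disk to 1048576" entry and the ExtendVirtualDisk_Task that just completed use the flavor's root_gb=1 expressed in kilobytes: 1 GiB = 1 * 1024 * 1024 = 1048576 KB, the unit the vSphere ExtendVirtualDisk_Task call expects in newCapacityKb. A hedged sketch of that step; the datacenter reference and vmdk path are placeholders and the parameter names are taken from the public vSphere API.

# root_gb=1 becomes 1 * 1024 * 1024 = 1048576 KB, matching the log entry above.
def extend_root_disk(session, dc_ref, vmdk_path, root_gb):
    new_capacity_kb = root_gb * 1024 * 1024
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=vmdk_path, datacenter=dc_ref,
                              newCapacityKb=new_capacity_kb,
                              eagerZero=False)
    session.wait_for_task(task)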
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.235495] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.236298] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d112f0-5e9e-42b1-83f3-d3ed278b6845 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.259761] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 040d7108-8da1-4914-b7fd-03cf09ec68aa/040d7108-8da1-4914-b7fd-03cf09ec68aa.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.260090] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a6c137a-9422-42fc-93b9-f33c9dbd03eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.279280] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.279456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.279630] env[68674]: DEBUG nova.network.neutron [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.282961] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 866.282961] env[68674]: value = "task-3240205" [ 866.282961] env[68674]: _type = "Task" [ 866.282961] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.291777] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240205, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.636332] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f147b483-9384-4fc1-996e-e8fb035c1942 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 866.707530] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5252204c-8536-2978-18dd-e7456097a198, 'name': SearchDatastore_Task, 'duration_secs': 0.011896} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.707895] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.708162] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.708417] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.708573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.708769] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.709037] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-798d3063-9356-41c1-ac2a-9805bbe9882b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.713277] env[68674]: DEBUG oslo_vmware.api [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240203, 'name': PowerOnVM_Task, 
'duration_secs': 0.575091} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.714111] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.714315] env[68674]: INFO nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Took 13.27 seconds to spawn the instance on the hypervisor. [ 866.714502] env[68674]: DEBUG nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.715230] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9166ab-291a-4a96-bba9-013623d870c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.722156] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.722364] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.725593] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-601c0fe1-8486-48f8-9a37-03898f686e9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.730716] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 866.730716] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b2db22-19e3-d04a-ba67-11176cea5be8" [ 866.730716] env[68674]: _type = "Task" [ 866.730716] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.738188] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b2db22-19e3-d04a-ba67-11176cea5be8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.794501] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240205, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.012907] env[68674]: DEBUG nova.network.neutron [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [{"id": "9f0aa506-1438-47ac-871c-632df3f943bf", "address": "fa:16:3e:e9:fe:db", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0aa506-14", "ovs_interfaceid": "9f0aa506-1438-47ac-871c-632df3f943bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.141147] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f6d28c5e-fe32-4c53-98ac-747a1b79e6c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 867.238522] env[68674]: INFO nova.compute.manager [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Took 50.59 seconds to build instance. [ 867.243579] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b2db22-19e3-d04a-ba67-11176cea5be8, 'name': SearchDatastore_Task, 'duration_secs': 0.009777} completed successfully. 
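[editor's note] The instance_info_cache update above stores network_info as a list of VIF dicts; extracting the useful fields (MAC address, tap device name, fixed IPs) is plain dictionary access. The example below runs over a trimmed copy of the VIF shown in that entry, keeping only a few fields.

# Plain dict access over a trimmed copy of the cached VIF shown above.
vif = {
    "id": "9f0aa506-1438-47ac-871c-632df3f943bf",
    "address": "fa:16:3e:e9:fe:db",
    "type": "ovs",
    "devname": "tap9f0aa506-14",
    "network": {
        "label": "tempest-ServerDiskConfigTestJSON-692483706-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.4", "type": "fixed"}],
        }],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
print(vif["address"], vif["devname"], fixed_ips)
# fa:16:3e:e9:fe:db tap9f0aa506-14 ['192.168.128.4']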
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.244439] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5637e150-cd5d-4b11-b30e-5e9f6e0e784c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.250363] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 867.250363] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529a7494-75a7-ee1d-0cf5-7b64f4f9f42a" [ 867.250363] env[68674]: _type = "Task" [ 867.250363] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.259588] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529a7494-75a7-ee1d-0cf5-7b64f4f9f42a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.299370] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240205, 'name': ReconfigVM_Task, 'duration_secs': 0.852915} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.299633] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 040d7108-8da1-4914-b7fd-03cf09ec68aa/040d7108-8da1-4914-b7fd-03cf09ec68aa.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.300215] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da75c105-5c5a-468d-866c-ab7ae54205ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.308477] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 867.308477] env[68674]: value = "task-3240206" [ 867.308477] env[68674]: _type = "Task" [ 867.308477] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.316754] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240206, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.515731] env[68674]: DEBUG oslo_concurrency.lockutils [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.644950] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 867.740876] env[68674]: DEBUG oslo_concurrency.lockutils [None req-710f367b-d8cf-405c-ba92-2b3dcd4dc862 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.621s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.761771] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529a7494-75a7-ee1d-0cf5-7b64f4f9f42a, 'name': SearchDatastore_Task, 'duration_secs': 0.010566} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.762105] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.762390] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.762649] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aade4966-fdd6-48ca-864c-9d984b38a561 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.770887] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 867.770887] env[68674]: value = "task-3240207" [ 867.770887] env[68674]: _type = "Task" [ 867.770887] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.778692] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.794861] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.795144] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.795380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.795598] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.795795] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.798061] env[68674]: INFO nova.compute.manager [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Terminating instance [ 867.818999] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240206, 'name': Rename_Task, 'duration_secs': 0.141763} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.819282] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.819524] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e1455c2-e3f9-4dab-8384-1c5312efe71e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.826194] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 867.826194] env[68674]: value = "task-3240208" [ 867.826194] env[68674]: _type = "Task" [ 867.826194] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.835496] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240208, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.044848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4bc21a-14ed-43c9-be73-7c258f375386 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.071646] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397426fa-b932-4406-a54f-e2fdc93fb22b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.084061] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 868.148620] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0e7c5243-ad33-4391-8977-b9019643e3de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.148620] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration 8d2ad03f-6e50-4194-b3d7-a98f0bd46666 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 868.148620] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 7aa58e2f-1202-4252-9c38-ce53084c573f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.148620] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 868.148951] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4288MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 868.284079] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474192} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.286529] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.286750] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.287184] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-363a9b9a-5882-448d-a886-58f89ee91197 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.295347] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 868.295347] env[68674]: value = "task-3240209" [ 868.295347] env[68674]: _type = "Task" [ 868.295347] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.302367] env[68674]: DEBUG nova.compute.manager [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Start destroying the instance on the hypervisor. 
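The copy-then-extend sequence logged above (CopyVirtualDisk_Task from the devstack image cache to the per-instance VMDK, then ExtendVirtualDisk_Task to 1048576 KB) goes through vCenter's VirtualDiskManager via oslo.vmware. A minimal sketch of that pair of calls, assuming an already-established VMwareAPISession (`session`), a datacenter reference (`dc_ref`), and placeholder VMDK paths standing in for the datastore paths in the log; this illustrates the call pattern, not Nova's exact helper code.

```python
def prepare_root_disk(session, dc_ref, cached_vmdk, instance_vmdk, new_kb=1048576):
    # VirtualDiskManager managed object from the vCenter service content.
    disk_mgr = session.vim.service_content.virtualDiskManager

    # CopyVirtualDisk_Task: cache VMDK -> per-instance VMDK, polled to completion.
    session.wait_for_task(
        session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                           sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                           destName=instance_vmdk, destDatacenter=dc_ref))

    # ExtendVirtualDisk_Task: grow the copy to the flavor's root size, in KB.
    session.wait_for_task(
        session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                           name=instance_vmdk, datacenter=dc_ref,
                           newCapacityKb=new_kb, eagerZero=False))
```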
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.302663] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.303699] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61d0622-a44d-4ba1-b423-f701452f7a80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.309501] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240209, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.316412] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.316674] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-264dd8a6-a5da-4a82-a132-62d8d1ef3705 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.324037] env[68674]: DEBUG oslo_vmware.api [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 868.324037] env[68674]: value = "task-3240210" [ 868.324037] env[68674]: _type = "Task" [ 868.324037] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.337793] env[68674]: DEBUG oslo_vmware.api [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.341034] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240208, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.587483] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc1be57-e087-4fc2-8a3b-f2a64d3f1f46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.592577] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.592885] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df7017de-8ba6-4559-9d41-fe0ae114acd7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.601633] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0207ab6f-f20c-4b63-af62-618fe8569d66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.605190] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 868.605190] env[68674]: value = "task-3240211" [ 868.605190] env[68674]: _type = "Task" [ 868.605190] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.637686] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5b31c2-0002-403e-9e1c-2bf0f7a78db8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.644884] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240211, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.651248] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b772921-fcf2-4c34-b9ab-8e1ae5196345 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.667370] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 868.808060] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064762} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.808349] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.809134] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cce9c1-b554-4ebc-8bf6-ac405b3bc649 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.830737] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.831018] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e5779c3-5725-4f2d-9885-ef55d49cdd8d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.855814] env[68674]: DEBUG oslo_vmware.api [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240208, 'name': PowerOnVM_Task, 'duration_secs': 0.887317} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.858829] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.859046] env[68674]: INFO nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Took 5.39 seconds to spawn the instance on the hypervisor. [ 868.859232] env[68674]: DEBUG nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.859516] env[68674]: DEBUG oslo_vmware.api [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240210, 'name': PowerOffVM_Task, 'duration_secs': 0.322221} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.861015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72f42b2-4497-4ad9-b2f2-fe1414ad7d45 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.863588] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.863710] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.863989] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 868.863989] env[68674]: value = "task-3240212" [ 868.863989] env[68674]: _type = "Task" [ 868.863989] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.864216] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97bd496c-4cf8-4eef-8a2d-b13c3bde3a37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.878444] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240212, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.955518] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.955809] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.956062] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleting the datastore file [datastore1] 95386cdb-c2e4-476a-8aaf-e10fdc40b591 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.956352] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52fd800e-ffa0-4252-a65d-0219d2ff8e36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.963295] env[68674]: DEBUG oslo_vmware.api [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 868.963295] env[68674]: value = "task-3240214" [ 868.963295] env[68674]: _type = "Task" [ 868.963295] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.972118] env[68674]: DEBUG oslo_vmware.api [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.115856] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240211, 'name': PowerOnVM_Task} progress is 66%. 
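For the instance being terminated (95386cdb-...), the surrounding records show the driver's destroy sequence: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance's datastore folder. A condensed sketch of that sequence, assuming an established oslo.vmware session, a datacenter reference, and a placeholder datastore path for the '[datastore1] <instance-uuid>' folder seen above; it is illustrative rather than Nova's actual implementation.

```python
def destroy_vm(session, vm_ref, dc_ref, ds_path):
    # 1. Power off the VM (PowerOffVM_Task is polled like any other task).
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

    # 2. Unregister the VM from the vCenter inventory (synchronous, no task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance directory from the datastore.
    file_mgr = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                           name=ds_path, datacenter=dc_ref))
```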
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.198431] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 869.198760] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 104 to 105 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 869.199060] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 869.379564] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.381865] env[68674]: INFO nova.compute.manager [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Took 44.00 seconds to build instance. [ 869.473250] env[68674]: DEBUG oslo_vmware.api [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146441} completed successfully. 
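The inventory payload reported to Placement above translates into schedulable capacity with the standard formula (total - reserved) * allocation_ratio. A quick check using the exact numbers from this log:

```python
# Inventory values copied from the provider update above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, int(capacity))
# VCPU 192, MEMORY_MB 196078, DISK_GB 400
```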
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.473526] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.473718] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.473892] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.474086] env[68674]: INFO nova.compute.manager [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Took 1.17 seconds to destroy the instance on the hypervisor. [ 869.474331] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.474541] env[68674]: DEBUG nova.compute.manager [-] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.474638] env[68674]: DEBUG nova.network.neutron [-] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.616083] env[68674]: DEBUG oslo_vmware.api [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240211, 'name': PowerOnVM_Task, 'duration_secs': 0.714572} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.616366] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.616549] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-06ce3db3-00e5-4c25-84a3-01b920bb709f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance 'e1283f87-5bdb-4d4e-a1c5-f3b1c9180188' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 869.704896] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 869.705983] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.650s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.705983] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.200s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.705983] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.707867] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.738s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.709392] env[68674]: INFO nova.compute.claims [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.712726] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.713664] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Cleaning 
up deleted instances {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 869.753710] env[68674]: INFO nova.scheduler.client.report [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Deleted allocations for instance 5bd42044-84f5-4f48-aa97-b7cf990ed35d [ 869.833091] env[68674]: DEBUG nova.compute.manager [req-b9b71fe9-a324-4531-a1e5-b5e8f0901ef3 req-72cbc8e4-e589-4b34-9d8c-69b31d6a1988 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Received event network-vif-deleted-7e110d01-86a3-4380-aee9-f6baa501e7bf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.833398] env[68674]: INFO nova.compute.manager [req-b9b71fe9-a324-4531-a1e5-b5e8f0901ef3 req-72cbc8e4-e589-4b34-9d8c-69b31d6a1988 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Neutron deleted interface 7e110d01-86a3-4380-aee9-f6baa501e7bf; detaching it from the instance and deleting it from the info cache [ 869.833655] env[68674]: DEBUG nova.network.neutron [req-b9b71fe9-a324-4531-a1e5-b5e8f0901ef3 req-72cbc8e4-e589-4b34-9d8c-69b31d6a1988 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.875894] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240212, 'name': ReconfigVM_Task, 'duration_secs': 0.613693} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.878896] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.878896] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62feea83-90e6-4223-b3bc-e183ad8741a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.885247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac3c4f8f-a30c-4cd6-a023-ff072cc72786 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "040d7108-8da1-4914-b7fd-03cf09ec68aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.514s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.885247] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 869.885247] env[68674]: value = "task-3240215" [ 869.885247] env[68674]: _type = "Task" [ 869.885247] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.895445] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240215, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.226216] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] There are 45 instances to clean {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 870.226527] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 7329e503-d87d-4e15-b181-65ac6e376781] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 870.244762] env[68674]: DEBUG nova.network.neutron [-] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.262891] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5030da48-6cc2-434e-8529-ad1074d58be3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "5bd42044-84f5-4f48-aa97-b7cf990ed35d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.362s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.336977] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcf74cfd-e376-4e27-93e1-5894c42d7db4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.346673] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8402bb61-8da9-4df7-b78b-88c9a79d0d68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.380449] env[68674]: DEBUG nova.compute.manager [req-b9b71fe9-a324-4531-a1e5-b5e8f0901ef3 req-72cbc8e4-e589-4b34-9d8c-69b31d6a1988 service nova] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Detach interface failed, port_id=7e110d01-86a3-4380-aee9-f6baa501e7bf, reason: Instance 95386cdb-c2e4-476a-8aaf-e10fdc40b591 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 870.392121] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240215, 'name': Rename_Task, 'duration_secs': 0.185328} completed successfully. 
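The "There are 45 instances to clean" and "Instance has had 0 of 5 cleanup attempts" messages come from the _run_pending_deletes periodic task. The following is an illustrative retry loop showing the bookkeeping those messages imply, not Nova's actual code; the attempt limit of 5 is taken from the log and is configurable in a real deployment.

```python
MAX_ATTEMPTS = 5  # the "of 5" in the log lines above

def run_pending_deletes(instances, cleanup):
    print('There are %d instances to clean' % len(instances))
    for inst in instances:
        attempts = inst.setdefault('cleanup_attempts', 0)
        if attempts >= MAX_ATTEMPTS:
            continue  # give up after the configured number of attempts
        print('Instance has had %d of %d cleanup attempts'
              % (attempts, MAX_ATTEMPTS))
        try:
            cleanup(inst)               # e.g. remove leftover instance files
            inst['cleaned'] = True
        except Exception:
            inst['cleanup_attempts'] = attempts + 1  # retry on the next period
```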
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.392421] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.392648] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da402035-76a5-4c88-b5bd-f13ebcda14d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.398164] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 870.398164] env[68674]: value = "task-3240217" [ 870.398164] env[68674]: _type = "Task" [ 870.398164] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.405653] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.731428] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 1a84d8ba-a6a2-4ccb-8ca3-d707e4a65a9f] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 870.747321] env[68674]: INFO nova.compute.manager [-] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Took 1.27 seconds to deallocate network for instance. [ 870.909705] env[68674]: DEBUG oslo_vmware.api [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240217, 'name': PowerOnVM_Task, 'duration_secs': 0.460995} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.909988] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.910379] env[68674]: INFO nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Took 5.32 seconds to spawn the instance on the hypervisor. 
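The Rename_Task / PowerOnVM_Task / "Waiting for the task" / "progress is N%" pattern that recurs throughout this trace is oslo.vmware's task polling on an established VMwareAPISession (the session in this log was created once at driver startup). A minimal sketch of that flow; the vCenter host, credentials, and VM reference are placeholders, not values from this deployment.

```python
from oslo_vmware import api

# Placeholder endpoint and credentials; in Nova these come from nova.conf.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # invoke_api returns a Task moref; wait_for_task polls it (producing the
    # "progress is N%" lines) and returns once vCenter reports success.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)
```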
[ 870.910581] env[68674]: DEBUG nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 870.911363] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a83a98-d481-4511-91df-c9723ef1b187 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.202094] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63c1b65-5376-45f2-91fd-adf46d58a25e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.209978] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5dcf26-4692-4347-8cb0-dbf133eebfea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.244439] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 55727bbc-6b65-4e4c-ba4f-8240efbf052a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 871.247578] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375751bc-2159-44bb-aad8-bb22d80abae9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.253684] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.258119] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2a004b-f3da-46e5-919b-a91d441bb926 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.274882] env[68674]: DEBUG nova.compute.provider_tree [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.428918] env[68674]: INFO nova.compute.manager [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Took 45.45 seconds to build instance. 
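The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplets that bracket nearly every operation in this trace (for example "compute_resources" just above) are emitted by oslo.concurrency's lockutils. A minimal sketch of the two usage forms; the lock names and guarded work are placeholders, and the mapping of each form to the lockutils.py line numbers in the log is an approximation.

```python
from oslo_concurrency import lockutils

# Context-manager form (roughly the "lock ... lockutils.py" entries):
def refresh_cache(instance_uuid, do_refresh):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        do_refresh()

# Decorator form (roughly the "inner ... lockutils.py" entries):
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # work done while no other caller holds the same named lock
```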
[ 871.682640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.682901] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.683107] env[68674]: DEBUG nova.compute.manager [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Going to confirm migration 2 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 871.751209] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 714142ec-89ad-44ab-8543-11493172a50b] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 871.777818] env[68674]: DEBUG nova.scheduler.client.report [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 871.931200] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3511b26a-0e4e-43ff-bfe9-c6868f69f1e6 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "867fd9ca-049f-441a-94bc-af60df598043" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.960s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.257015] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2007222e-e4e5-44b3-bd9e-55b4a2143c3e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 872.263980] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.264276] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 
tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.264536] env[68674]: DEBUG nova.network.neutron [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.264894] env[68674]: DEBUG nova.objects.instance [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'info_cache' on Instance uuid e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.282342] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.282885] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 872.285421] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 43.279s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.676666] env[68674]: INFO nova.compute.manager [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Rebuilding instance [ 872.719493] env[68674]: DEBUG nova.compute.manager [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.720439] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163e3315-2a9e-47ca-aced-7419f0f58ded {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.760207] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0e3c27fe-a2d9-45dc-9559-a678f90a6fef] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 872.788957] env[68674]: DEBUG nova.compute.utils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 
tempest-TenantUsagesTestJSON-408667991-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 872.792150] env[68674]: INFO nova.compute.claims [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.795631] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 872.795772] env[68674]: DEBUG nova.network.neutron [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.819549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.820299] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.820299] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.820430] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.820692] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.824985] env[68674]: 
INFO nova.compute.manager [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Terminating instance [ 872.857142] env[68674]: DEBUG nova.policy [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e8f4d157c1e446db432682005d549fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66bcc36bd652448cae86957bf50d8b53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 873.207689] env[68674]: DEBUG nova.network.neutron [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Successfully created port: a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.265201] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 1699f556-d451-40e3-a213-7edb753b03f1] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 873.301119] env[68674]: INFO nova.compute.resource_tracker [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating resource usage from migration dbcb4b5d-eb95-4807-bf85-9adfb2b1f475 [ 873.303022] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 873.335252] env[68674]: DEBUG nova.compute.manager [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Start destroying the instance on the hypervisor. 
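The "Policy check for network:attach_external_network failed" record above is an oslo.policy decision against credentials carrying only the 'reader' and 'member' roles. A hedged sketch of that check; the 'role:admin' check string is illustrative and is not necessarily Nova's actual default for this rule, while the roles and project_id are copied from the log entry.

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Illustrative default rule requiring admin; Nova's real default may differ.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': '66bcc36bd652448cae86957bf50d8b53'}  # from the log
allowed = enforcer.authorize('network:attach_external_network',
                             {}, creds, do_raise=False)
print(allowed)  # False -> the DEBUG "Policy check ... failed" line
```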
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 873.335252] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.335252] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4f38e7-0911-48a7-b2df-0d4a2cafa014 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.353337] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.353858] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-497e3634-6a04-45fa-89e1-f77366cffbd0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.362663] env[68674]: DEBUG oslo_vmware.api [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 873.362663] env[68674]: value = "task-3240218" [ 873.362663] env[68674]: _type = "Task" [ 873.362663] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.373400] env[68674]: DEBUG oslo_vmware.api [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240218, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.717068] env[68674]: DEBUG nova.network.neutron [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [{"id": "9f0aa506-1438-47ac-871c-632df3f943bf", "address": "fa:16:3e:e9:fe:db", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0aa506-14", "ovs_interfaceid": "9f0aa506-1438-47ac-871c-632df3f943bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.733161] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.734462] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8cef78e-6510-465d-8296-6779b6c9a313 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.740920] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 873.740920] env[68674]: value = "task-3240219" [ 873.740920] env[68674]: _type = "Task" [ 873.740920] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.750035] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240219, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.767132] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3bc7b52b-2f2c-485f-ad4b-05fb0fdc9e1f] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 873.828029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d39e9-964b-44ae-85a2-029f73909fdc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.837100] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367a006d-840f-4c6d-b84a-7e4c78964f16 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.873371] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad82934-4c6c-4731-8a8e-2c2183bb7071 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.881866] env[68674]: DEBUG oslo_vmware.api [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240218, 'name': PowerOffVM_Task, 'duration_secs': 0.316278} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.883951] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 873.884145] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 873.884425] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fafb160f-da85-4cfa-a78e-33916158e7b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.886713] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc1dbdd-3228-4ee1-a55c-0e5bbb3704f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.902442] env[68674]: DEBUG nova.compute.provider_tree [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.955787] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Unregistered the VM {{(pid=68674) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 873.956586] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 873.956586] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Deleting the datastore file [datastore1] fa89e0b5-590d-43fb-bb11-02f8fdee0c2f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 873.956586] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dfa0a87-4364-4b26-b8b2-37ffe1294268 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.963865] env[68674]: DEBUG oslo_vmware.api [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for the task: (returnval){ [ 873.963865] env[68674]: value = "task-3240221" [ 873.963865] env[68674]: _type = "Task" [ 873.963865] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.972656] env[68674]: DEBUG oslo_vmware.api [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.221035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.221035] env[68674]: DEBUG nova.objects.instance [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'migration_context' on Instance uuid e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.252015] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240219, 'name': PowerOffVM_Task, 'duration_secs': 0.262073} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.252311] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.252579] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.253380] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328c2627-87f4-44e7-9ba5-af198f4aa1bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.260204] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.260461] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb74c8bf-13ad-4d38-9984-9938d97ba586 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.270039] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3c8459db-cc54-4644-8e4c-83c87017a186] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 874.286908] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.286908] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.286908] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleting the datastore file [datastore2] 867fd9ca-049f-441a-94bc-af60df598043 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.287070] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8638e52-7507-4fb0-92f5-fb11f60281c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.294030] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 
874.294030] env[68674]: value = "task-3240223" [ 874.294030] env[68674]: _type = "Task" [ 874.294030] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.302834] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.316479] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 874.342927] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 874.343213] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.343377] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 874.343563] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.343768] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 874.343946] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 874.344174] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 874.344351] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 874.344523] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 874.344733] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 874.344929] env[68674]: DEBUG nova.virt.hardware [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 874.345832] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8562ef2f-a707-49f1-b449-3d4c5a78c1c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.354551] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3cc3a1-bac1-4c91-93d7-2ff7ce3a3037 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.405763] env[68674]: DEBUG nova.scheduler.client.report [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.473814] env[68674]: DEBUG oslo_vmware.api [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Task: {'id': task-3240221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125138} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.474117] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.474326] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.474538] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.474713] env[68674]: INFO nova.compute.manager [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 874.474953] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 874.475159] env[68674]: DEBUG nova.compute.manager [-] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 874.475255] env[68674]: DEBUG nova.network.neutron [-] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 874.726025] env[68674]: DEBUG nova.objects.base [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 874.726025] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99714095-e2d6-4f60-933e-f8b278e35824 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.751975] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb08530-6c79-4dfd-903d-44f068086619 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.759876] env[68674]: DEBUG oslo_vmware.api [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 874.759876] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523bd94f-0d62-4fd5-b2ca-6c1d5492e1b6" [ 874.759876] env[68674]: _type = "Task" [ 874.759876] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.768433] env[68674]: DEBUG oslo_vmware.api [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523bd94f-0d62-4fd5-b2ca-6c1d5492e1b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.772534] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 1b405b1f-ee1f-4e6e-9355-de8b5c26ab49] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 874.805536] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088554} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.806171] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.806681] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.807089] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.913400] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.625s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.913400] env[68674]: INFO nova.compute.manager [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Migrating [ 874.920333] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.990s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.922774] env[68674]: INFO nova.compute.claims [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.205924] env[68674]: DEBUG nova.compute.manager [req-6e6dbf1e-780c-4966-a2ee-2b522dc1fe2b req-b95a560d-ccae-4e65-b590-d5fd8030e020 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Received event network-vif-plugged-a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.206496] env[68674]: DEBUG oslo_concurrency.lockutils [req-6e6dbf1e-780c-4966-a2ee-2b522dc1fe2b req-b95a560d-ccae-4e65-b590-d5fd8030e020 service nova] Acquiring lock "036fbca7-be6a-43c6-972e-a71524833498-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.206496] env[68674]: DEBUG oslo_concurrency.lockutils [req-6e6dbf1e-780c-4966-a2ee-2b522dc1fe2b req-b95a560d-ccae-4e65-b590-d5fd8030e020 service nova] Lock "036fbca7-be6a-43c6-972e-a71524833498-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.206694] env[68674]: DEBUG oslo_concurrency.lockutils [req-6e6dbf1e-780c-4966-a2ee-2b522dc1fe2b req-b95a560d-ccae-4e65-b590-d5fd8030e020 service nova] Lock "036fbca7-be6a-43c6-972e-a71524833498-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.206922] env[68674]: DEBUG nova.compute.manager [req-6e6dbf1e-780c-4966-a2ee-2b522dc1fe2b req-b95a560d-ccae-4e65-b590-d5fd8030e020 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] No waiting events found dispatching network-vif-plugged-a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.207597] env[68674]: WARNING nova.compute.manager [req-6e6dbf1e-780c-4966-a2ee-2b522dc1fe2b req-b95a560d-ccae-4e65-b590-d5fd8030e020 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Received unexpected event network-vif-plugged-a46c478e-b4c3-4dbf-8882-05b024f6d89b for instance with vm_state building and task_state spawning. [ 875.241259] env[68674]: DEBUG nova.network.neutron [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Successfully updated port: a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.273314] env[68674]: DEBUG oslo_vmware.api [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523bd94f-0d62-4fd5-b2ca-6c1d5492e1b6, 'name': SearchDatastore_Task, 'duration_secs': 0.007427} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.273960] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.275535] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: d88ccf9b-7432-4be0-82f7-b2a9155f7d86] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 875.407142] env[68674]: DEBUG nova.compute.manager [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Received event network-changed-a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.407359] env[68674]: DEBUG nova.compute.manager [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Refreshing instance network info cache due to event network-changed-a46c478e-b4c3-4dbf-8882-05b024f6d89b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 875.407571] env[68674]: DEBUG oslo_concurrency.lockutils [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] Acquiring lock "refresh_cache-036fbca7-be6a-43c6-972e-a71524833498" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.407715] env[68674]: DEBUG oslo_concurrency.lockutils [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] Acquired lock "refresh_cache-036fbca7-be6a-43c6-972e-a71524833498" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.407876] env[68674]: DEBUG nova.network.neutron [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Refreshing network info cache for port a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.437269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.437442] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.437618] env[68674]: DEBUG nova.network.neutron [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.744756] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "refresh_cache-036fbca7-be6a-43c6-972e-a71524833498" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.778618] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 1189fa93-608b-4684-a675-f1caf29a9f43] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 875.841468] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 875.841713] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.841871] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 875.842068] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.842220] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 875.842427] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 875.842652] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 875.842814] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 875.842982] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 875.843166] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 875.843340] env[68674]: DEBUG nova.virt.hardware [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 
tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 875.844219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5268159-7a76-4569-ba96-77386d3b840a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.852008] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ac1100-0954-49ae-8074-f7efa664c6b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.865638] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.871213] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.871750] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.871966] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afb343ff-63c4-435b-8b96-46640d138f19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.887912] env[68674]: DEBUG nova.network.neutron [-] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.890096] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.890096] env[68674]: value = "task-3240224" [ 875.890096] env[68674]: _type = "Task" [ 875.890096] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.898095] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240224, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.944579] env[68674]: DEBUG nova.network.neutron [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.045078] env[68674]: DEBUG nova.network.neutron [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.239991] env[68674]: DEBUG nova.network.neutron [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.281486] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 367461db-8bc4-4cf0-b7f6-f79ee2bf8589] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 876.391153] env[68674]: INFO nova.compute.manager [-] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Took 1.92 seconds to deallocate network for instance. [ 876.408690] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240224, 'name': CreateVM_Task, 'duration_secs': 0.484042} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.408874] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.409385] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.409597] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.409964] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 876.410224] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9b5dfde-7667-4436-a0e4-8459b0d72b8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.416520] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 876.416520] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4b16a-7682-7754-e03d-399be140d487" [ 876.416520] env[68674]: _type = "Task" [ 876.416520] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.426056] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4b16a-7682-7754-e03d-399be140d487, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.439950] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63584040-c06c-4fdb-b068-10bd22ab76c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.449140] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb76adb8-7cc2-45e2-bba1-c25028531e94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.484427] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab6369c-c745-48fb-a50b-38556982fa17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.492908] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff926bba-7744-4da1-979e-d487085a4aa9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.507882] env[68674]: DEBUG nova.compute.provider_tree [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 876.547816] env[68674]: DEBUG oslo_concurrency.lockutils [req-13b71806-4699-4b61-a299-c778bc145258 req-f6316c27-570e-4d8b-b8a0-d943894623cb service nova] Releasing lock "refresh_cache-036fbca7-be6a-43c6-972e-a71524833498" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.548234] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquired lock "refresh_cache-036fbca7-be6a-43c6-972e-a71524833498" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.548397] env[68674]: DEBUG nova.network.neutron [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.743293] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.786967] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 505b0352-39ab-4841-8766-14626af2b13e] Instance 
has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 876.904071] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.927037] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4b16a-7682-7754-e03d-399be140d487, 'name': SearchDatastore_Task, 'duration_secs': 0.008986} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.927337] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.927576] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.927812] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.927959] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.928154] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.928400] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b26bd37-e900-40bf-a85b-e7425cf5799c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.936407] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.936579] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.937274] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5089ea96-fbd5-4ab8-b693-fea9749caf17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.942279] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 876.942279] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52095392-8a2a-ff73-89c9-c45707563fa9" [ 876.942279] env[68674]: _type = "Task" [ 876.942279] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.949889] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52095392-8a2a-ff73-89c9-c45707563fa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.041631] env[68674]: DEBUG nova.scheduler.client.report [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 877.041930] env[68674]: DEBUG nova.compute.provider_tree [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 105 to 106 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 877.042128] env[68674]: DEBUG nova.compute.provider_tree [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.081617] env[68674]: DEBUG nova.network.neutron [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.259635] env[68674]: DEBUG nova.network.neutron [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Updating instance_info_cache with network_info: [{"id": "a46c478e-b4c3-4dbf-8882-05b024f6d89b", "address": "fa:16:3e:4d:66:aa", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa46c478e-b4", "ovs_interfaceid": "a46c478e-b4c3-4dbf-8882-05b024f6d89b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.288113] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3b0837ef-53fb-4851-b69f-ee0a1d89fbf8] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 877.453665] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52095392-8a2a-ff73-89c9-c45707563fa9, 'name': SearchDatastore_Task, 'duration_secs': 0.008446} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.454453] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-193f6e1f-8553-40cc-b11f-40a397bf5a28 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.460809] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 877.460809] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527c68a1-d59a-4342-2f6b-f5db01e3a0d7" [ 877.460809] env[68674]: _type = "Task" [ 877.460809] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.467642] env[68674]: DEBUG nova.compute.manager [req-75f3a4a9-75b2-4303-8ba8-208661004b16 req-56141f62-b857-41c2-b802-25a903ae1071 service nova] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Received event network-vif-deleted-0ef8fcab-d9ac-4837-bdf3-84d597114e1d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.471242] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527c68a1-d59a-4342-2f6b-f5db01e3a0d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.547502] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.548061] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 877.551264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.918s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.553152] env[68674]: INFO nova.compute.claims [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.761809] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Releasing lock "refresh_cache-036fbca7-be6a-43c6-972e-a71524833498" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.762179] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Instance network_info: |[{"id": "a46c478e-b4c3-4dbf-8882-05b024f6d89b", "address": "fa:16:3e:4d:66:aa", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa46c478e-b4", "ovs_interfaceid": "a46c478e-b4c3-4dbf-8882-05b024f6d89b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 877.762739] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:66:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a46c478e-b4c3-4dbf-8882-05b024f6d89b', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.770343] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Creating folder: Project (66bcc36bd652448cae86957bf50d8b53). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.771227] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f19efa22-6a9f-44a3-99d2-46c4a68e3deb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.782611] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Created folder: Project (66bcc36bd652448cae86957bf50d8b53) in parent group-v647377. [ 877.782759] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Creating folder: Instances. Parent ref: group-v647582. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.782987] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a41f0247-6cac-4c43-a171-a864eb0ba5d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.790768] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0f618d12-dc7b-4739-8ace-9453a7175d75] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 877.793506] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Created folder: Instances in parent group-v647582. 
[ 877.793733] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.794108] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.794311] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b793405-0742-43a0-8d3e-19b5bec48de7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.814338] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.814338] env[68674]: value = "task-3240227" [ 877.814338] env[68674]: _type = "Task" [ 877.814338] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.824944] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240227, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.970544] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527c68a1-d59a-4342-2f6b-f5db01e3a0d7, 'name': SearchDatastore_Task, 'duration_secs': 0.010276} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.971102] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.971228] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.971533] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a36b3a5-f50f-4e9a-b63a-542da4d322a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.977861] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 877.977861] env[68674]: value = "task-3240228" [ 877.977861] env[68674]: _type = "Task" [ 877.977861] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.985745] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.060546] env[68674]: DEBUG nova.compute.utils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.062163] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.062630] env[68674]: DEBUG nova.network.neutron [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.130222] env[68674]: DEBUG nova.policy [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bc8df4f271e4330b3874e04b792a537', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a84d9d6e23bd40049c34e6f087252b4e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 878.264025] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77642f90-3693-4cc0-b51d-97ede78434d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.283111] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 878.294910] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: d167585b-11f4-462c-b12e-c6a440c1476a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 878.326553] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240227, 'name': CreateVM_Task, 'duration_secs': 0.361789} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.326770] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.327565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.327751] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.328081] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 878.328366] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9afeeeab-3131-4cb4-823f-21f034eb06af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.334856] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 878.334856] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b96760-a30c-a2f1-07d8-a88651fbb57f" [ 878.334856] env[68674]: _type = "Task" [ 878.334856] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.345166] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b96760-a30c-a2f1-07d8-a88651fbb57f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.489354] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240228, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.546114] env[68674]: DEBUG nova.network.neutron [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Successfully created port: 856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.565874] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 878.789577] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.790050] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08fea76f-53f2-41d8-b486-fbd9b36c74f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.798161] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 6278d756-139c-4fcd-bf31-304c978d6682] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 878.800394] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 878.800394] env[68674]: value = "task-3240229" [ 878.800394] env[68674]: _type = "Task" [ 878.800394] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.813749] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 878.814223] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 878.847724] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b96760-a30c-a2f1-07d8-a88651fbb57f, 'name': SearchDatastore_Task, 'duration_secs': 0.052299} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.852019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.852019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.852019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.852019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.852019] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.852019] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-365234b3-3236-43ff-81ab-eebf13238091 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.862054] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.862215] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.863312] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fa529e3-66ed-499f-b6c2-2f6d129f860e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.869197] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 878.869197] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e35d0-4752-bf1c-0dcd-8d366769a2c0" [ 878.869197] env[68674]: _type = "Task" [ 878.869197] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.878782] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e35d0-4752-bf1c-0dcd-8d366769a2c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.988495] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551642} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.991113] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.991353] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.991767] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63265021-71b9-43f9-90c9-ec7dd4b22bfa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.998977] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 878.998977] env[68674]: value = "task-3240230" [ 878.998977] env[68674]: _type = "Task" [ 878.998977] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.009855] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.015726] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8371539-ec37-475d-9b2e-67c143a0f1e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.022452] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23694e55-5c8e-4dfe-ac72-6e435c497cc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.054281] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1065e932-865b-4352-bb73-9a8ebda29ef8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.062029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01262e4c-1ce6-4257-957b-0bcd3a17ab81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.078242] env[68674]: DEBUG nova.compute.provider_tree [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.301627] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 045e54ff-9e2c-4b04-afac-34cb6580cb2c] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 879.323680] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 879.323946] env[68674]: DEBUG 
nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.324107] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 879.324293] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.324441] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 879.324591] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 879.324797] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 879.324956] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 879.325143] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 879.325305] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 879.325479] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 879.330975] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4a0a0b1-63b4-491f-8b75-81a265e1ce15 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.346689] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 879.346689] env[68674]: value = "task-3240231" [ 879.346689] env[68674]: _type = "Task" [ 879.346689] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.355113] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240231, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.379120] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e35d0-4752-bf1c-0dcd-8d366769a2c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009304} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.380421] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-594411ab-d578-4658-aa13-55f461908af7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.384825] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 879.384825] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527bc50d-cc8c-aecb-a238-4f3ef623a151" [ 879.384825] env[68674]: _type = "Task" [ 879.384825] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.392429] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527bc50d-cc8c-aecb-a238-4f3ef623a151, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.509210] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06427} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.509531] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.510322] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f3f927-b94e-47ba-86f0-b3a3d446b6ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.529511] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.530168] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdbe3b8a-bf76-4e57-a62e-b431377257e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.552302] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 879.552302] env[68674]: value = "task-3240232" [ 879.552302] env[68674]: _type = "Task" [ 879.552302] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.561755] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240232, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.581270] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 879.619016] env[68674]: DEBUG nova.virt.hardware [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 879.619016] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387afc57-db07-4691-bea4-d63a893769ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.622704] env[68674]: DEBUG nova.scheduler.client.report [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 106 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 879.622944] env[68674]: DEBUG nova.compute.provider_tree [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 106 to 107 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 879.623143] env[68674]: DEBUG nova.compute.provider_tree [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.631299] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6eacce-6609-4e92-8473-58ef62ebd299 {{(pid=68674) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.804391] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0097c367-bb3a-4b7b-9fcc-b3e3482689e2] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 879.856771] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240231, 'name': ReconfigVM_Task, 'duration_secs': 0.154365} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.857093] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 879.895029] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527bc50d-cc8c-aecb-a238-4f3ef623a151, 'name': SearchDatastore_Task, 'duration_secs': 0.014944} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.895655] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.895934] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 036fbca7-be6a-43c6-972e-a71524833498/036fbca7-be6a-43c6-972e-a71524833498.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 879.896208] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21bd7aa4-054e-4125-9b50-44c9d61e13b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.903751] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 879.903751] env[68674]: value = "task-3240233" [ 879.903751] env[68674]: _type = "Task" [ 879.903751] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.911268] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240233, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.063567] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240232, 'name': ReconfigVM_Task, 'duration_secs': 0.250511} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.064137] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 867fd9ca-049f-441a-94bc-af60df598043/867fd9ca-049f-441a-94bc-af60df598043.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.066485] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-382772f9-9566-48e9-b65b-b10ea39e2f03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.073639] env[68674]: DEBUG nova.compute.manager [req-55000914-a37a-473f-8947-e458a998deb1 req-403571b9-9c7b-46ca-b198-01f94045591d service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received event network-vif-plugged-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.073989] env[68674]: DEBUG oslo_concurrency.lockutils [req-55000914-a37a-473f-8947-e458a998deb1 req-403571b9-9c7b-46ca-b198-01f94045591d service nova] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.074324] env[68674]: DEBUG oslo_concurrency.lockutils [req-55000914-a37a-473f-8947-e458a998deb1 req-403571b9-9c7b-46ca-b198-01f94045591d service nova] Lock "7d953e59-53c1-4041-a641-35c12c012f7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.074948] env[68674]: DEBUG oslo_concurrency.lockutils [req-55000914-a37a-473f-8947-e458a998deb1 req-403571b9-9c7b-46ca-b198-01f94045591d service nova] Lock "7d953e59-53c1-4041-a641-35c12c012f7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.075261] env[68674]: DEBUG nova.compute.manager [req-55000914-a37a-473f-8947-e458a998deb1 req-403571b9-9c7b-46ca-b198-01f94045591d service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] No waiting events found dispatching network-vif-plugged-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 880.075542] env[68674]: WARNING nova.compute.manager [req-55000914-a37a-473f-8947-e458a998deb1 req-403571b9-9c7b-46ca-b198-01f94045591d service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received unexpected event network-vif-plugged-856fc34c-4049-4185-9ab1-8f86e2cfdeff for instance with vm_state building and task_state spawning. [ 880.078309] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 880.078309] env[68674]: value = "task-3240234" [ 880.078309] env[68674]: _type = "Task" [ 880.078309] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.089788] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240234, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.131083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.131638] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 880.134642] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.614s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.134847] env[68674]: DEBUG nova.objects.instance [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 880.203723] env[68674]: DEBUG nova.network.neutron [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Successfully updated port: 856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.308101] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f45200cd-6cb0-498a-8858-1e70177031d9] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 880.363274] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.363580] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.363740] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.363937] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.364096] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 
tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.364247] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.364450] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.364611] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.364778] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.364929] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.365113] env[68674]: DEBUG nova.virt.hardware [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.370519] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Reconfiguring VM instance instance-0000000c to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 880.371043] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd12adf9-f667-4e80-9776-bb8ff2b09d02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.390086] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 880.390086] env[68674]: value = "task-3240235" [ 880.390086] env[68674]: _type = "Task" [ 880.390086] env[68674]: } to complete. 
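The repeated 'Waiting for the task: (returnval){ ... } to complete' blocks and the '_poll_task ... progress is N%' lines in this log are the visible side of oslo.vmware's task-wait helper. A minimal sketch of that pattern follows; the function name, the vm_ref argument and the commented-out host/credentials are placeholders, not values taken from this log.

from oslo_vmware import api as vmware_api

def power_on_and_wait(session, vm_ref):
    # Kick off an asynchronous vSphere task, then block on it. wait_for_task()
    # repeatedly polls the task object, which is what produces the recurring
    # "_poll_task ... progress is N%" entries in the log above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

# Building a session (connects to vCenter on construction, so only usable
# against a real endpoint; the host and credentials here are placeholders):
# session = vmware_api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
#                                       api_retry_count=10, task_poll_interval=0.5)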
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.397379] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240235, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.412671] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240233, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440163} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.412896] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 036fbca7-be6a-43c6-972e-a71524833498/036fbca7-be6a-43c6-972e-a71524833498.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.413114] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.413341] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bfc545e-e48f-419f-966c-9946bf7dbe39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.419478] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 880.419478] env[68674]: value = "task-3240236" [ 880.419478] env[68674]: _type = "Task" [ 880.419478] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.427356] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.588147] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240234, 'name': Rename_Task, 'duration_secs': 0.166473} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.588450] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.588693] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67b1ddca-99da-47fa-b8ab-44e1b46292bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.595051] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 880.595051] env[68674]: value = "task-3240237" [ 880.595051] env[68674]: _type = "Task" [ 880.595051] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.603148] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240237, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.642914] env[68674]: DEBUG nova.compute.utils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 880.643913] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 880.644102] env[68674]: DEBUG nova.network.neutron [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 880.687476] env[68674]: DEBUG nova.policy [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcdb66599bea45219bbf9401434e9024', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5be31196e1f452e8768b57c105d1765', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 880.707340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.707488] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.707652] env[68674]: DEBUG nova.network.neutron [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.811799] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 275cdfcc-06f0-4c29-b18b-55cde38480a3] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 880.900789] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240235, 'name': ReconfigVM_Task, 'duration_secs': 0.15519} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.901285] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Reconfigured VM instance instance-0000000c to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 880.902147] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33efe19f-17ab-4725-a650-d7ffcbf09815 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.927793] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006/3d85c8c4-f09c-4f75-aff5-9a49d84ae006.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.928155] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-170a42c0-e235-4ed2-82bd-cfaa6ed603fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.951142] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06514} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.952964] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.953382] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 880.953382] env[68674]: value = "task-3240238" [ 880.953382] env[68674]: _type = "Task" [ 880.953382] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.954119] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380c09f4-2d41-4ad5-99c7-9a3b890ba255 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.965836] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240238, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.986608] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 036fbca7-be6a-43c6-972e-a71524833498/036fbca7-be6a-43c6-972e-a71524833498.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.986608] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02e03c36-2da3-43eb-9737-66872dd552b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.005585] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 881.005585] env[68674]: value = "task-3240239" [ 881.005585] env[68674]: _type = "Task" [ 881.005585] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.015704] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240239, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.022170] env[68674]: DEBUG nova.network.neutron [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Successfully created port: 9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.107011] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240237, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.144633] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 881.147904] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2322077-a266-4614-9c97-fc5d2f8ff31b tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.148947] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 39.354s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.263742] env[68674]: DEBUG nova.network.neutron [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.321569] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: b4b7bd0b-b87a-479a-a99c-3a2aae9a6b14] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 881.444516] env[68674]: DEBUG nova.network.neutron [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.469245] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240238, 'name': ReconfigVM_Task} progress is 14%. 
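The 'Lock "compute_resources" acquired ... waited Ns / released ... held Ns' pairs above come from oslo.concurrency's lockutils decorator wrapping the resource tracker's claim methods. A minimal sketch of that decorator form, assuming a hypothetical ExampleTracker class; only the lock name is taken from the log.

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

class ExampleTracker(object):
    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def claim(self, instance_uuid):
        # Every method decorated with the same lock name serializes here; the
        # decorator's wrapper logs the "Acquiring lock ... by ...",
        # "acquired ... waited" and "released ... held" DEBUG messages
        # seen in the log above.
        return instance_uuid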
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.517502] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240239, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.606114] env[68674]: DEBUG oslo_vmware.api [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240237, 'name': PowerOnVM_Task, 'duration_secs': 0.512636} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.606408] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.606620] env[68674]: DEBUG nova.compute.manager [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.607412] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6972d30c-b203-482b-9253-29ee1348e471 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.660977] env[68674]: INFO nova.compute.claims [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.833295] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: a62237a7-a123-4378-b655-d489ef08474b] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 881.947352] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.947716] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Instance network_info: |[{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 881.948130] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:1e:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '856fc34c-4049-4185-9ab1-8f86e2cfdeff', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.956949] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.957213] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.957435] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ef20eb9-5e65-4ffe-832c-19fdd46aa719 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.982517] env[68674]: DEBUG oslo_vmware.api [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240238, 'name': ReconfigVM_Task, 'duration_secs': 0.978699} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.983066] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006/3d85c8c4-f09c-4f75-aff5-9a49d84ae006.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.983357] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 881.986602] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.986602] env[68674]: value = "task-3240240" [ 881.986602] env[68674]: _type = "Task" [ 881.986602] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.996430] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240240, 'name': CreateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.017813] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240239, 'name': ReconfigVM_Task, 'duration_secs': 0.938201} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.018119] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 036fbca7-be6a-43c6-972e-a71524833498/036fbca7-be6a-43c6-972e-a71524833498.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.018778] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04cdca4f-f585-4ccc-ba16-8a35942d6634 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.026747] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 882.026747] env[68674]: value = "task-3240241" [ 882.026747] env[68674]: _type = "Task" [ 882.026747] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.039856] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240241, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.106114] env[68674]: DEBUG nova.compute.manager [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.106114] env[68674]: DEBUG nova.compute.manager [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing instance network info cache due to event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 882.106114] env[68674]: DEBUG oslo_concurrency.lockutils [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.106114] env[68674]: DEBUG oslo_concurrency.lockutils [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.106114] env[68674]: DEBUG nova.network.neutron [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.130019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.154664] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 882.168186] env[68674]: INFO nova.compute.resource_tracker [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating resource usage from migration 8d2ad03f-6e50-4194-b3d7-a98f0bd46666 [ 882.180485] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 882.180716] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.180879] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 882.181081] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.181236] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 882.183333] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 882.183656] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 882.183886] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 882.184106] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 882.184321] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 882.184515] env[68674]: DEBUG nova.virt.hardware [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 882.185814] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78efd141-1015-4391-bca2-625817e6a602 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.198011] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f415cd8-373d-4e98-b6eb-cd4beea6d99b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.337553] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f9168b78-ed64-4109-84f0-db0af61d2f10] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 882.493421] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df553829-2f4f-4390-bf4a-3b9521f412f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.502824] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240240, 'name': CreateVM_Task, 'duration_secs': 0.455389} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.502824] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.503528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.503914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.504252] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 882.517252] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e08a13d6-fc84-4f98-bf06-86247e191921 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.521651] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f691368-0d7c-4926-a88b-370097263489 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.527441] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 882.527441] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0a6d4-1742-8723-5a0d-44d683554b37" [ 882.527441] env[68674]: _type = "Task" [ 882.527441] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.544841] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 882.558542] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240241, 'name': Rename_Task, 'duration_secs': 0.216203} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.561896] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.562176] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0a6d4-1742-8723-5a0d-44d683554b37, 'name': SearchDatastore_Task, 'duration_secs': 0.009248} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.564437] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f713003e-4e5d-47cc-9499-a70d05390f1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.565949] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.566867] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.567143] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.567300] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.567479] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.567884] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76a6f5d8-b72e-4962-8809-c3dbea88353e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
882.574804] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 882.574804] env[68674]: value = "task-3240242" [ 882.574804] env[68674]: _type = "Task" [ 882.574804] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.575774] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.575967] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.581509] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcebfc94-901f-4c0f-87cb-1361dd5dd84f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.588387] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.590261] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 882.590261] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cec9a9-dd3a-4451-cf28-bcd63ed2c630" [ 882.590261] env[68674]: _type = "Task" [ 882.590261] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.596938] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cec9a9-dd3a-4451-cf28-bcd63ed2c630, 'name': SearchDatastore_Task} progress is 0%. 
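The image-cache handling above, with locks named after '[datastore1] devstack-image-cache_base' and its .vmdk, appears to use the same lockutils module in its context-manager form so that concurrent spawns do not fetch the same cached image twice. A small sketch under that assumption; the function and its arguments are placeholders, not code from this log.

from oslo_concurrency import lockutils

def ensure_cached_image(cache_path, fetch_fn):
    # Serialize callers that want the same cached image: the first one inside
    # the lock does the fetch, later callers wait and then reuse the result.
    with lockutils.lock(cache_path):
        return fetch_fn(cache_path)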
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.709654] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30b985a-7729-4968-8951-705d9e50a0c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.717712] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbe12e0-e81b-4f76-949d-11e2abfd5ff3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.723027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "867fd9ca-049f-441a-94bc-af60df598043" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.723027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "867fd9ca-049f-441a-94bc-af60df598043" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.723027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "867fd9ca-049f-441a-94bc-af60df598043-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.723027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "867fd9ca-049f-441a-94bc-af60df598043-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.723027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "867fd9ca-049f-441a-94bc-af60df598043-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.754285] env[68674]: DEBUG nova.network.neutron [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Successfully updated port: 9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.758422] env[68674]: INFO nova.compute.manager [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 
867fd9ca-049f-441a-94bc-af60df598043] Terminating instance [ 882.760725] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9ac867-a7c0-410e-8460-fda15ec07558 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.771463] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e9e5e4-a0f7-4cc5-bb0f-73e68d3ecc5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.789031] env[68674]: DEBUG nova.compute.provider_tree [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.843020] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 627fb348-1749-4480-97b9-b479a182d4ee] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 882.972390] env[68674]: DEBUG nova.network.neutron [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updated VIF entry in instance network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 882.973735] env[68674]: DEBUG nova.network.neutron [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.085488] env[68674]: DEBUG oslo_vmware.api [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240242, 'name': PowerOnVM_Task, 'duration_secs': 0.500429} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.085782] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.085950] env[68674]: INFO nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Took 8.77 seconds to spawn the instance on the hypervisor. [ 883.087014] env[68674]: DEBUG nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.087848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0058d7e-538d-4816-84d8-1f44abd014fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.102552] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cec9a9-dd3a-4451-cf28-bcd63ed2c630, 'name': SearchDatastore_Task, 'duration_secs': 0.011447} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.103283] env[68674]: DEBUG nova.network.neutron [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Port 1b7ed5a9-214f-4011-b73e-63954c02e25e binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 883.108661] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-019b5836-a979-4944-b350-8df8d40a4117 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.114088] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 883.114088] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274f5f5-94b5-a67d-8adb-56caf8ce4509" [ 883.114088] env[68674]: _type = "Task" [ 883.114088] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.122338] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274f5f5-94b5-a67d-8adb-56caf8ce4509, 'name': SearchDatastore_Task} progress is 0%. 
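The lock lines in this section (lockutils.py:405/410/424) always arrive as the same triplet: Acquiring lock ... by ..., Lock ... acquired ... :: waited, Lock ... "released" ... :: held. A minimal sketch of the oslo.concurrency pattern that emits lines of this shape, assuming only that oslo.concurrency is installed and DEBUG logging is enabled; the lock name and the decorated function below are illustrative, not Nova's actual code.

# Minimal sketch, not Nova code: the lockutils.synchronized decorator should
# emit an "Acquiring lock ... / acquired ... waited / released ... held" DEBUG
# triplet like the ones in this log. Lock name and function are illustrative.
import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized('867fd9ca-049f-441a-94bc-af60df598043')
def do_terminate_instance():
    pass  # critical section; wait and hold times are measured around it

do_terminate_instance()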
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.265031] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "refresh_cache-f147b483-9384-4fc1-996e-e8fb035c1942" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.265031] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "refresh_cache-f147b483-9384-4fc1-996e-e8fb035c1942" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.265031] env[68674]: DEBUG nova.network.neutron [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.266636] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "refresh_cache-867fd9ca-049f-441a-94bc-af60df598043" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.266786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "refresh_cache-867fd9ca-049f-441a-94bc-af60df598043" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.266950] env[68674]: DEBUG nova.network.neutron [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.293174] env[68674]: DEBUG nova.scheduler.client.report [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.343781] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3463e09e-dc2f-432c-9eff-8192c2616240] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 883.476891] env[68674]: DEBUG oslo_concurrency.lockutils [req-4a3e68ba-80a3-447d-9cf2-dd1354434b03 
req-af7cb1a3-a826-48f6-93cb-c9998eef916e service nova] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.619471] env[68674]: INFO nova.compute.manager [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Took 56.68 seconds to build instance. [ 883.628129] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5274f5f5-94b5-a67d-8adb-56caf8ce4509, 'name': SearchDatastore_Task, 'duration_secs': 0.023616} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.628476] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.628799] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/7d953e59-53c1-4041-a641-35c12c012f7e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.629214] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46c8f4ea-bd75-4df2-8208-1704c832a604 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.637298] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 883.637298] env[68674]: value = "task-3240243" [ 883.637298] env[68674]: _type = "Task" [ 883.637298] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.647292] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240243, 'name': CopyVirtualDisk_Task} progress is 0%. 
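The CopyVirtualDisk_Task entries above follow oslo.vmware's invoke-then-poll pattern: the SOAP method is issued through the session ("Invoking VirtualDiskManager.CopyVirtualDisk_Task"), and wait_for_task drives the _poll_task loop (api.py:397/434/444) until the task reports completion. A hedged sketch of that pattern; the vCenter endpoint, credentials, datacenter reference and datastore paths are placeholders, and the exact arguments Nova's vm_util passes are not reproduced here.

# Hedged sketch of the invoke-then-poll pattern behind the CopyVirtualDisk_Task
# entries above; endpoint, credentials and paths are placeholders, not values
# taken from this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',        # assumed endpoint/credentials
    api_retry_count=3, task_poll_interval=0.5)

def copy_cached_image(session, dc_ref, source_path, dest_path):
    # dc_ref is an assumed Datacenter managed-object reference.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=source_path, sourceDatacenter=dc_ref,
                              destName=dest_path, destDatacenter=dc_ref)
    session.wait_for_task(task)  # polls until "completed successfully"

The same invoke-then-poll shape covers the ExtendVirtualDisk_Task and ReconfigVM_Task entries later in this section.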
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.800116] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.648s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.800116] env[68674]: INFO nova.compute.manager [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Migrating [ 883.807288] env[68674]: DEBUG nova.network.neutron [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.811354] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.387s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.811670] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.813767] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.861s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.815426] env[68674]: INFO nova.compute.claims [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.832773] env[68674]: DEBUG nova.network.neutron [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Instance cache missing network info. 
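Several entries here record how long callers waited for and then held the coarse "compute_resources" lock (waited 38.387s and 35.861s above, held 2.648s for the resize claim). When triaging contention like this, the timings can be summarized straight from the log with the standard library alone; a self-contained sketch, with the log filename as a placeholder.

# Stdlib-only sketch: summarize lock wait/hold times from lines like the
# "compute_resources" entries above. 'nova-compute.log' is a placeholder name.
import re
from collections import defaultdict

ACQUIRED = re.compile(r'Lock "(?P<lock>[^"]+)" acquired by .* waited (?P<s>[\d.]+)s')
RELEASED = re.compile(r'Lock "(?P<lock>[^"]+)" "released" by .* held (?P<s>[\d.]+)s')

waits, holds = defaultdict(list), defaultdict(list)
with open('nova-compute.log') as fh:
    for line in fh:
        if (m := ACQUIRED.search(line)):
            waits[m['lock']].append(float(m['s']))
        if (m := RELEASED.search(line)):
            holds[m['lock']].append(float(m['s']))

for lock in sorted(holds):
    print(f"{lock}: max wait {max(waits[lock], default=0.0):.3f}s, "
          f"max hold {max(holds[lock]):.3f}s, {len(holds[lock])} releases")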
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.847309] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: ae945f3f-fde8-4b25-a5bd-81014fc99690] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 883.866484] env[68674]: INFO nova.scheduler.client.report [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted allocations for instance a4cb1632-eada-4b10-a66f-64fecf45fd76 [ 883.987567] env[68674]: DEBUG nova.network.neutron [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.107193] env[68674]: DEBUG nova.network.neutron [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Updating instance_info_cache with network_info: [{"id": "9353371a-f186-4ea6-a770-95c9d40d2340", "address": "fa:16:3e:bd:68:5d", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9353371a-f1", "ovs_interfaceid": "9353371a-f186-4ea6-a770-95c9d40d2340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.126139] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4e8ff5f8-2216-4b0f-bd07-bc60ac0d5afd tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "036fbca7-be6a-43c6-972e-a71524833498" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.189s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.129488] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.129488] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.129488] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.149321] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240243, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.314227] env[68674]: DEBUG nova.compute.manager [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Received event network-vif-plugged-9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.314615] env[68674]: DEBUG oslo_concurrency.lockutils [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] Acquiring lock "f147b483-9384-4fc1-996e-e8fb035c1942-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.314967] env[68674]: DEBUG oslo_concurrency.lockutils [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] Lock "f147b483-9384-4fc1-996e-e8fb035c1942-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.315287] env[68674]: DEBUG oslo_concurrency.lockutils [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] Lock "f147b483-9384-4fc1-996e-e8fb035c1942-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.315590] env[68674]: DEBUG nova.compute.manager [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] No waiting events found dispatching network-vif-plugged-9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 884.315909] env[68674]: WARNING nova.compute.manager [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] 
Received unexpected event network-vif-plugged-9353371a-f186-4ea6-a770-95c9d40d2340 for instance with vm_state building and task_state spawning. [ 884.316231] env[68674]: DEBUG nova.compute.manager [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Received event network-changed-9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.316530] env[68674]: DEBUG nova.compute.manager [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Refreshing instance network info cache due to event network-changed-9353371a-f186-4ea6-a770-95c9d40d2340. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 884.316841] env[68674]: DEBUG oslo_concurrency.lockutils [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] Acquiring lock "refresh_cache-f147b483-9384-4fc1-996e-e8fb035c1942" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.323925] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.324038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.324335] env[68674]: DEBUG nova.network.neutron [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.351182] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f500b495-7bfb-40ff-8a10-e46ca6744902] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 884.381945] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71fd8626-83d7-4cba-88c0-3146b4a22916 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "a4cb1632-eada-4b10-a66f-64fecf45fd76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.813s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.490896] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "refresh_cache-867fd9ca-049f-441a-94bc-af60df598043" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.491608] env[68674]: DEBUG nova.compute.manager [None req-1d15b7a1-2010-418c-961f-01c095492de9 
tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.491964] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.493588] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae1d418-c2ce-4376-9ec5-25b1e98d39e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.504490] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.504920] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c626e00-054f-4244-bcaf-b25d6c2e2350 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.513111] env[68674]: DEBUG oslo_vmware.api [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 884.513111] env[68674]: value = "task-3240244" [ 884.513111] env[68674]: _type = "Task" [ 884.513111] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.524547] env[68674]: DEBUG oslo_vmware.api [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240244, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.579963] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "036fbca7-be6a-43c6-972e-a71524833498" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.580323] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "036fbca7-be6a-43c6-972e-a71524833498" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.580551] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "036fbca7-be6a-43c6-972e-a71524833498-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.580772] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "036fbca7-be6a-43c6-972e-a71524833498-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.580998] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "036fbca7-be6a-43c6-972e-a71524833498-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.583455] env[68674]: INFO nova.compute.manager [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Terminating instance [ 884.610110] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "refresh_cache-f147b483-9384-4fc1-996e-e8fb035c1942" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.610592] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Instance network_info: |[{"id": "9353371a-f186-4ea6-a770-95c9d40d2340", "address": "fa:16:3e:bd:68:5d", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": 
"tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9353371a-f1", "ovs_interfaceid": "9353371a-f186-4ea6-a770-95c9d40d2340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 884.610842] env[68674]: DEBUG oslo_concurrency.lockutils [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] Acquired lock "refresh_cache-f147b483-9384-4fc1-996e-e8fb035c1942" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.611035] env[68674]: DEBUG nova.network.neutron [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Refreshing network info cache for port 9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.612322] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:68:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9353371a-f186-4ea6-a770-95c9d40d2340', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.622112] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.623482] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.624346] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1701f59c-6cf5-4de8-b3c0-db82289acd24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.653722] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635877} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.655186] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/7d953e59-53c1-4041-a641-35c12c012f7e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.655426] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.655930] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.655930] env[68674]: value = "task-3240245" [ 884.655930] env[68674]: _type = "Task" [ 884.655930] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.656132] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c0b19e6-8a78-4777-93c6-3cbc0cccb488 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.668084] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240245, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.669637] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 884.669637] env[68674]: value = "task-3240246" [ 884.669637] env[68674]: _type = "Task" [ 884.669637] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.679347] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240246, 'name': ExtendVirtualDisk_Task} progress is 0%. 
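Every vSphere task in this section ends with a line of the form Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully (0.500429s for the PowerOnVM_Task and 0.635877s for the CopyVirtualDisk_Task above). A stdlib-only sketch that aggregates those durations per task type; the log filename is again a placeholder.

# Stdlib-only sketch: per-task-type duration summary from the
# "... 'duration_secs': N} completed successfully." entries in this log.
import re
from collections import defaultdict

TASK_DONE = re.compile(
    r"'name': (?P<name>\w+), 'duration_secs': (?P<secs>[\d.]+)\} completed successfully")

durations = defaultdict(list)
with open('nova-compute.log') as fh:          # placeholder filename
    for line in fh:
        m = TASK_DONE.search(line)
        if m:
            durations[m['name']].append(float(m['secs']))

for name, secs in sorted(durations.items()):
    print(f"{name}: n={len(secs)}, avg {sum(secs) / len(secs):.3f}s, max {max(secs):.3f}s")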
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.855410] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 8790d635-fec5-4dcf-8cb0-220c2edec971] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 885.027777] env[68674]: DEBUG oslo_vmware.api [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240244, 'name': PowerOffVM_Task, 'duration_secs': 0.235559} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.028096] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.028273] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.028523] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9cb3a52-b0ba-43a4-9c8a-0e55e1b537d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.052954] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.053185] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.053533] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleting the datastore file [datastore1] 867fd9ca-049f-441a-94bc-af60df598043 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.055078] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0acdcd50-f086-413d-994e-0ca2ce6e557f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.064084] env[68674]: DEBUG oslo_vmware.api [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 885.064084] env[68674]: value = "task-3240248" [ 885.064084] env[68674]: _type = "Task" [ 885.064084] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.078617] env[68674]: DEBUG oslo_vmware.api [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.079949] env[68674]: DEBUG nova.network.neutron [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.090688] env[68674]: DEBUG nova.compute.manager [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Start destroying the instance on the hypervisor. 
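The instance_info_cache updates above embed the complete network_info as a JSON list (port id, MAC, subnet CIDR, fixed IP, NSX segmentation id). Because that payload is valid JSON, it can be lifted out of the log directly; a stdlib-only sketch, assuming the usual one-log-entry-per-line layout and a placeholder filename.

# Stdlib-only sketch: extract the embedded network_info JSON from
# "Updating instance_info_cache with network_info: [...]" entries and print a
# few fields. Assumes one log entry per line; filename is a placeholder.
import json
import re

NWINFO = re.compile(r'Updating instance_info_cache with network_info: (\[.*\])')

with open('nova-compute.log') as fh:
    for line in fh:
        m = NWINFO.search(line)
        if not m:
            continue
        for vif in json.loads(m.group(1)):
            subnets = vif['network']['subnets']
            fixed_ip = subnets[0]['ips'][0]['address'] if subnets else None
            print(vif['id'], vif['address'], fixed_ip,
                  vif['details'].get('segmentation_id'))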
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 885.090972] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 885.091652] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f84b932-d73e-4d88-9331-2af137f9d730 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.102985] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.103252] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bc1b5f2-da0b-41e1-b992-94e27624d244 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.111676] env[68674]: DEBUG oslo_vmware.api [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 885.111676] env[68674]: value = "task-3240249" [ 885.111676] env[68674]: _type = "Task" [ 885.111676] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.120304] env[68674]: DEBUG oslo_vmware.api [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.174148] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240245, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.187615] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.250041} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.187922] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.188870] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb37080-7e29-4f86-b056-1bf76f5a54e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.216765] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/7d953e59-53c1-4041-a641-35c12c012f7e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.220297] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.220509] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.220733] env[68674]: DEBUG nova.network.neutron [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.222179] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5eb2e95-5a7b-483c-9202-e8d81ba4e0a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.250114] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 885.250114] env[68674]: value = "task-3240250" [ 885.250114] env[68674]: _type = "Task" [ 885.250114] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.262166] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240250, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.359567] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 60ded0c9-7e20-4071-b5ce-9189d8d01d5c] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 885.476522] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0838430f-a9c0-4679-96fa-fd507b839a93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.484410] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a4a8ce-6dc5-4689-a676-47485e6f56ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.518160] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fbd2ee-5c71-45cc-b052-b6e469cf08f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.526316] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23691a23-3abb-423c-994a-14299a2fa1d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.540068] env[68674]: DEBUG nova.compute.provider_tree [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.572845] env[68674]: DEBUG oslo_vmware.api [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240241} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.573142] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.573345] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.573527] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.573699] env[68674]: INFO nova.compute.manager [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Took 1.08 seconds to destroy the instance on the hypervisor. [ 885.573934] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.574441] env[68674]: DEBUG nova.compute.manager [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.574441] env[68674]: DEBUG nova.network.neutron [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 885.585883] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.611239] env[68674]: DEBUG nova.network.neutron [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.620017] env[68674]: DEBUG oslo_vmware.api [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240249, 'name': PowerOffVM_Task, 'duration_secs': 0.29264} completed successfully. 
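The 867fd9ca-... teardown traced above runs in a fixed order on the driver side: power the VM off (PowerOffVM_Task), unregister it, delete the datastore directory contents (DeleteDatastoreFile_Task), and only then hand back to the compute manager for network deallocation. A hedged sketch of that sequence through an oslo.vmware session; this is not Nova's vmops/ds_util code, and session, vm_ref, dc_ref and the datastore path are assumed inputs.

# Hedged sketch (not Nova's actual vmops/ds_util code) of the teardown order
# traced above. `session` is an oslo_vmware VMwareAPISession; `vm_ref` and
# `dc_ref` are assumed managed-object refs; `ds_path` is a placeholder.
def destroy_vm(session, vm_ref, dc_ref, ds_path='[datastore1] <instance-uuid>'):
    # 1. Power off the VM (PowerOffVM_Task).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Remove it from the vCenter inventory (UnregisterVM is not a task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance's files from the datastore (DeleteDatastoreFile_Task).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)

Network deallocation (the deallocate_for_instance() entry above) then happens in the compute manager, outside the virt driver.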
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.620017] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.620017] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.620017] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf503d04-c464-45cb-8a02-49d7f490a160 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.639951] env[68674]: DEBUG nova.network.neutron [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Updated VIF entry in instance network info cache for port 9353371a-f186-4ea6-a770-95c9d40d2340. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 885.640414] env[68674]: DEBUG nova.network.neutron [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Updating instance_info_cache with network_info: [{"id": "9353371a-f186-4ea6-a770-95c9d40d2340", "address": "fa:16:3e:bd:68:5d", "network": {"id": "5674f315-1fc1-48df-8016-8dfaddb1bbf5", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-878396495-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5be31196e1f452e8768b57c105d1765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9353371a-f1", "ovs_interfaceid": "9353371a-f186-4ea6-a770-95c9d40d2340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.675687] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240245, 'name': CreateVM_Task, 'duration_secs': 0.691379} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.676456] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.676881] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.677117] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.677506] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 885.677810] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4015889-9637-49e9-bca6-fba693464f58 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.684183] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.684440] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.684755] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Deleting the datastore file [datastore1] 036fbca7-be6a-43c6-972e-a71524833498 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.684941] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 885.684941] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fa812-0498-a3b9-897c-41fd745285bd" [ 885.684941] env[68674]: _type = "Task" [ 885.684941] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.685143] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-313177ad-3123-4a9e-87ad-a6b63d62ecf2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.696290] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521fa812-0498-a3b9-897c-41fd745285bd, 'name': SearchDatastore_Task, 'duration_secs': 0.0091} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.697480] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.697714] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.697945] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.698110] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.698292] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.698590] env[68674]: DEBUG oslo_vmware.api [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for the task: (returnval){ [ 885.698590] env[68674]: value = "task-3240252" [ 885.698590] env[68674]: _type = "Task" [ 885.698590] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.698768] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68805ab1-b45c-4b82-ba57-96cd68760622 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.708015] env[68674]: DEBUG oslo_vmware.api [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.708988] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.709201] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.709948] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b077a266-adc7-4628-af8f-91f0b8a08439 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.714543] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 885.714543] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cb2d3f-d437-3092-b7c0-8b5681160a05" [ 885.714543] env[68674]: _type = "Task" [ 885.714543] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.722380] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cb2d3f-d437-3092-b7c0-8b5681160a05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.724299] env[68674]: DEBUG nova.network.neutron [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.776211] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240250, 'name': ReconfigVM_Task, 'duration_secs': 0.379142} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.776507] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/7d953e59-53c1-4041-a641-35c12c012f7e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.777171] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43dff703-69d0-4c59-b29b-146016399c6b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.784253] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 885.784253] env[68674]: value = "task-3240253" [ 885.784253] env[68674]: _type = "Task" [ 885.784253] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.792476] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240253, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.862831] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 50bb7509-b7e9-4dc3-9746-acd46010cc26] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 886.045549] env[68674]: DEBUG nova.scheduler.client.report [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.111281] env[68674]: DEBUG nova.network.neutron [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.144297] env[68674]: DEBUG oslo_concurrency.lockutils [req-86368dde-9f93-46d4-8972-19d183304fda req-b3cb2711-d53d-42a0-855d-3c4e8080ef97 service nova] Releasing lock "refresh_cache-f147b483-9384-4fc1-996e-e8fb035c1942" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.213131] env[68674]: DEBUG oslo_vmware.api [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Task: {'id': task-3240252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171517} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.213378] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.213653] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.213838] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.214022] env[68674]: INFO nova.compute.manager [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Took 1.12 seconds to destroy the instance on the hypervisor. [ 886.214293] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 886.214458] env[68674]: DEBUG nova.compute.manager [-] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.214551] env[68674]: DEBUG nova.network.neutron [-] [instance: 036fbca7-be6a-43c6-972e-a71524833498] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.226813] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.231471] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cb2d3f-d437-3092-b7c0-8b5681160a05, 'name': SearchDatastore_Task, 'duration_secs': 0.009052} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.231649] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37c1aada-5bf6-45e7-9951-b144752170c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.237170] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 886.237170] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524549c4-e51d-cdd8-1803-3b94c244afde" [ 886.237170] env[68674]: _type = "Task" [ 886.237170] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.244790] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524549c4-e51d-cdd8-1803-3b94c244afde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.294894] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240253, 'name': Rename_Task, 'duration_secs': 0.167971} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.295129] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.295458] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a36b57d3-b91e-4a73-8921-dd49d9a49250 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.306172] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 886.306172] env[68674]: value = "task-3240254" [ 886.306172] env[68674]: _type = "Task" [ 886.306172] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.318635] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240254, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.366894] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 357b515d-ef37-4688-969e-f894be30edb7] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 886.550618] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.551216] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.554492] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.845s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.554748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.556915] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.187s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.558601] env[68674]: INFO nova.compute.claims [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.601213] env[68674]: INFO nova.scheduler.client.report [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Deleted allocations for instance 2ed83aff-9a73-464b-914a-479d91cdfce0 [ 886.615040] env[68674]: INFO nova.compute.manager [-] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Took 1.04 seconds to deallocate network for instance. 
[ 886.748765] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524549c4-e51d-cdd8-1803-3b94c244afde, 'name': SearchDatastore_Task, 'duration_secs': 0.009678} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.750087] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.750502] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f147b483-9384-4fc1-996e-e8fb035c1942/f147b483-9384-4fc1-996e-e8fb035c1942.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.751532] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdef54c9-d864-4019-b1c4-8f3739da88b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.759118] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35dca0c8-3d33-439f-84d4-3f6326fd6d94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.781709] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 886.781709] env[68674]: value = "task-3240255" [ 886.781709] env[68674]: _type = "Task" [ 886.781709] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.783211] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addfb87d-a29b-4a4b-b8b0-2b64731538c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.795195] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 886.802769] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240255, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.816131] env[68674]: DEBUG oslo_vmware.api [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240254, 'name': PowerOnVM_Task, 'duration_secs': 0.480221} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.816348] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.816527] env[68674]: INFO nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Took 7.24 seconds to spawn the instance on the hypervisor. [ 886.816744] env[68674]: DEBUG nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.817489] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da97d272-a5d2-4352-bf64-c406d762c731 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.869770] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 9e337960-78c1-4ddb-a6f6-d6fd57dbf86d] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 887.031214] env[68674]: DEBUG nova.compute.manager [req-394cb42e-df31-4b03-b0f7-1e492359795a req-2933c706-6081-4ea9-8d4a-3096b8870a68 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Received event network-vif-deleted-a46c478e-b4c3-4dbf-8882-05b024f6d89b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.031821] env[68674]: INFO nova.compute.manager [req-394cb42e-df31-4b03-b0f7-1e492359795a req-2933c706-6081-4ea9-8d4a-3096b8870a68 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Neutron deleted interface a46c478e-b4c3-4dbf-8882-05b024f6d89b; detaching it from the instance and deleting it from the info cache [ 887.031821] env[68674]: DEBUG nova.network.neutron [req-394cb42e-df31-4b03-b0f7-1e492359795a req-2933c706-6081-4ea9-8d4a-3096b8870a68 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.066838] env[68674]: DEBUG nova.compute.utils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 887.070301] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a 
tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 887.070525] env[68674]: DEBUG nova.network.neutron [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.114793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.114793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.120025] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52819ea7-a50b-4e07-b422-1b4d8fb0b52d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.120025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0554f438-d8f4-4c3a-955d-98d0a296da5c tempest-InstanceActionsV221TestJSON-1736511880 tempest-InstanceActionsV221TestJSON-1736511880-project-member] Lock "2ed83aff-9a73-464b-914a-479d91cdfce0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.808s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.122353] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.139800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 887.144339] env[68674]: DEBUG nova.policy [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 
'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 887.294074] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240255, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.306479] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3be05f-dabf-4a07-b758-ef1a45ebe3c6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance '3d85c8c4-f09c-4f75-aff5-9a49d84ae006' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 887.339770] env[68674]: INFO nova.compute.manager [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Took 57.43 seconds to build instance. [ 887.360349] env[68674]: DEBUG nova.network.neutron [-] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.373553] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: d1c7a508-7d45-4eff-bb06-b85bfe392772] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 887.535415] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b354a2c-947e-449e-bdd5-095a760c54f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.545854] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963024ca-a85d-4bff-af0f-0a9fa35ac3ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.557764] env[68674]: DEBUG nova.network.neutron [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Successfully created port: 461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.585993] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.588478] env[68674]: DEBUG nova.compute.manager [req-394cb42e-df31-4b03-b0f7-1e492359795a req-2933c706-6081-4ea9-8d4a-3096b8870a68 service nova] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Detach interface failed, port_id=a46c478e-b4c3-4dbf-8882-05b024f6d89b, reason: Instance 036fbca7-be6a-43c6-972e-a71524833498 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 887.620970] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 887.651336] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.652552] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24dab5f3-afec-4420-a167-e6d043171769 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.662374] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 887.662374] env[68674]: value = "task-3240256" [ 887.662374] env[68674]: _type = "Task" [ 887.662374] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.668963] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.794176] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783425} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.798115] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f147b483-9384-4fc1-996e-e8fb035c1942/f147b483-9384-4fc1-996e-e8fb035c1942.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.798565] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.798979] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-651d6b83-9dd5-441f-aa87-3a8efdb52675 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.810734] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 887.810734] env[68674]: value = "task-3240257" [ 887.810734] env[68674]: _type = "Task" [ 887.810734] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.826435] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240257, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.832756] env[68674]: DEBUG nova.compute.manager [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.832756] env[68674]: DEBUG nova.compute.manager [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing instance network info cache due to event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 887.832756] env[68674]: DEBUG oslo_concurrency.lockutils [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.832990] env[68674]: DEBUG oslo_concurrency.lockutils [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.832990] env[68674]: DEBUG nova.network.neutron [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.843404] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fac338c8-6288-476d-91d7-8977d81cab03 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.430s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.867266] env[68674]: INFO nova.compute.manager [-] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Took 1.65 seconds to deallocate network for instance. [ 887.882073] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 503e9328-bbd8-414f-8bea-250ed8247d67] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 888.148914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.155836] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e51c22-3c15-44bc-ba17-ef81339d019d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.167078] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b541498b-7245-42c5-9dc7-6a1bd881af06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.172364] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240256, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.207237] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42876e7-1031-4371-bfd8-daac6e3d4896 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.216372] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1a34a8-c214-42df-8bef-aa3b6ad2e69b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.231696] env[68674]: DEBUG nova.compute.provider_tree [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.320714] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074439} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.324585] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.325632] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95345739-5898-407f-8432-8e0fc126ab44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.350396] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] f147b483-9384-4fc1-996e-e8fb035c1942/f147b483-9384-4fc1-996e-e8fb035c1942.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.350740] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bd46048-f110-4238-ace3-8672cffe0ce0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.373202] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 888.373202] env[68674]: value = "task-3240258" [ 888.373202] env[68674]: _type = "Task" [ 888.373202] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.378097] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.382432] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240258, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.388964] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 7fbe6b95-27fe-4aa2-b86c-f389ab00ad6b] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 888.596620] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.624162] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None 
req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.627219] env[68674]: DEBUG nova.virt.hardware [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.627219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf121de8-80fe-41d3-ad53-e24ee4e662d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.636219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a25a02-8514-4b67-935e-2c421dd950c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.672557] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240256, 'name': PowerOffVM_Task, 'duration_secs': 0.560002} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.672840] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.673023] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 888.735502] env[68674]: DEBUG nova.scheduler.client.report [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.853588] env[68674]: DEBUG nova.network.neutron [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updated VIF entry in instance network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.854008] env[68674]: DEBUG nova.network.neutron [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.884500] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240258, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.891718] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: a123c5f2-e775-4dd2-9a5a-35e7d6705dfa] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 889.179187] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 889.179424] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.179771] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 889.179771] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.179944] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 889.183483] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 889.183828] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 889.184035] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 889.184199] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 889.184368] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 889.184625] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 889.190201] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a7de38d-bbd8-40bd-809e-a2dd76f52cde {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.211797] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 889.211797] env[68674]: value = "task-3240259" [ 889.211797] env[68674]: _type = "Task" [ 889.211797] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.229988] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240259, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.241342] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.242095] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 889.247993] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.221s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.249809] env[68674]: INFO nova.compute.claims [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.314532] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.314827] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.315034] env[68674]: INFO nova.compute.manager [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Shelving [ 889.325893] env[68674]: DEBUG nova.compute.manager [req-c7fa1343-9749-4121-b3e6-556334d6d211 req-6876366c-105d-4c94-9d34-8cb137f9ce02 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Received event network-vif-plugged-461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.326128] env[68674]: DEBUG oslo_concurrency.lockutils [req-c7fa1343-9749-4121-b3e6-556334d6d211 req-6876366c-105d-4c94-9d34-8cb137f9ce02 service nova] Acquiring lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.326335] env[68674]: DEBUG oslo_concurrency.lockutils [req-c7fa1343-9749-4121-b3e6-556334d6d211 req-6876366c-105d-4c94-9d34-8cb137f9ce02 service nova] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.326506] env[68674]: DEBUG oslo_concurrency.lockutils [req-c7fa1343-9749-4121-b3e6-556334d6d211 req-6876366c-105d-4c94-9d34-8cb137f9ce02 service nova] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.326674] env[68674]: DEBUG nova.compute.manager [req-c7fa1343-9749-4121-b3e6-556334d6d211 req-6876366c-105d-4c94-9d34-8cb137f9ce02 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] No waiting events found dispatching network-vif-plugged-461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.326838] env[68674]: WARNING nova.compute.manager [req-c7fa1343-9749-4121-b3e6-556334d6d211 req-6876366c-105d-4c94-9d34-8cb137f9ce02 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Received unexpected event network-vif-plugged-461dbfba-04af-43fe-b80d-c6c22078b46a for instance with vm_state building and task_state spawning. [ 889.359909] env[68674]: DEBUG oslo_concurrency.lockutils [req-3ccb507c-f10c-4416-9fdf-b3160f115831 req-d8edc7dd-179c-48d4-9034-93799eebc0db service nova] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.393950] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240258, 'name': ReconfigVM_Task, 'duration_secs': 0.886931} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.394379] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 160d9aa2-048d-45a2-ab55-581c8721ac3b] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 889.398497] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Reconfigured VM instance instance-00000049 to attach disk [datastore2] f147b483-9384-4fc1-996e-e8fb035c1942/f147b483-9384-4fc1-996e-e8fb035c1942.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.398497] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bc4e6be-9b73-45a1-a8d5-10390ec3ba7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.409253] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 889.409253] env[68674]: value = "task-3240260" [ 889.409253] env[68674]: _type = "Task" [ 889.409253] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.424322] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240260, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.531657] env[68674]: DEBUG nova.network.neutron [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Successfully updated port: 461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.725390] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240259, 'name': ReconfigVM_Task, 'duration_secs': 0.183884} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.725842] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 889.754904] env[68674]: DEBUG nova.compute.utils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.758451] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.758625] env[68674]: DEBUG nova.network.neutron [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.828464] env[68674]: DEBUG nova.policy [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b5a988ac9d24d1492c499e210f61be7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fbaef606f1948db867cd3a0c5ff3692', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.898371] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e75d2bc7-f356-4443-9641-d9ebf35843cd] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 889.918744] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240260, 'name': Rename_Task, 'duration_secs': 0.14346} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.919040] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.919299] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fe7fedb-5f4f-42d0-b181-cbc2b3390204 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.927296] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 889.927296] env[68674]: value = "task-3240261" [ 889.927296] env[68674]: _type = "Task" [ 889.927296] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.935801] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240261, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.035738] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "refresh_cache-f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.035738] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.035738] env[68674]: DEBUG nova.network.neutron [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.082035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.082035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.082035] env[68674]: DEBUG nova.compute.manager [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Going to confirm migration 3 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:10:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0763b137-0ce8-4652-8505-6b8377dc2900',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-684543547',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 
tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.236595] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.236930] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.236930] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.237070] env[68674]: DEBUG nova.virt.hardware [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.243619] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfiguring VM instance instance-0000003f to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.243853] 
env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-623a8334-f66d-4e33-b3ed-ac0c203b765e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.263737] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 890.279010] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 890.279010] env[68674]: value = "task-3240262" [ 890.279010] env[68674]: _type = "Task" [ 890.279010] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.290572] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240262, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.324822] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.325517] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c53e062d-b9d8-4c64-8ace-afb445fea0eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.335738] env[68674]: DEBUG nova.network.neutron [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Successfully created port: b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.339678] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 890.339678] env[68674]: value = "task-3240263" [ 890.339678] env[68674]: _type = "Task" [ 890.339678] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.352216] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.407035] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 02d4aee3-7267-4658-a277-8a9a00dd9f6e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 890.438882] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240261, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.588901] env[68674]: DEBUG nova.network.neutron [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.680029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.680029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.680029] env[68674]: DEBUG nova.network.neutron [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.680029] env[68674]: DEBUG nova.objects.instance [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'info_cache' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 890.793518] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240262, 'name': ReconfigVM_Task, 'duration_secs': 0.204707} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.794051] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfigured VM instance instance-0000003f to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 890.794644] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721c1b63-dbc2-4dc1-b84e-7aa967f36c09 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.821750] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.827477] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fe4ade4-0c8d-48ae-8f55-3f8221425580 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.852493] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240263, 'name': PowerOffVM_Task, 'duration_secs': 0.208532} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.857107] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.857896] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 890.857896] env[68674]: value = "task-3240264" [ 890.857896] env[68674]: _type = "Task" [ 890.857896] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.859097] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810163f0-1282-4ad8-9869-6367b8c2dad9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.870476] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240264, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.886249] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f2b0b-3481-403a-8445-b649b66fe6db {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.890697] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9845748-ab84-4c1f-8888-da5194f76fc6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.903922] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdc7e2d-8d9e-44a5-9f4c-3fce4bd5501d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.937633] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 89ccc16e-d0e5-4f7d-985c-8693188e7ed5] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 890.940879] env[68674]: DEBUG nova.network.neutron [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Updating instance_info_cache with network_info: [{"id": "461dbfba-04af-43fe-b80d-c6c22078b46a", "address": "fa:16:3e:4a:f7:50", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461dbfba-04", "ovs_interfaceid": "461dbfba-04af-43fe-b80d-c6c22078b46a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.945673] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e265856b-8283-46a2-a428-1997271a550c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.957331] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71303a2-1d00-46ec-b8fa-08f17c3fe92e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.961895] env[68674]: DEBUG oslo_vmware.api [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240261, 'name': 
PowerOnVM_Task, 'duration_secs': 0.678034} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.962726] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.962956] env[68674]: INFO nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Took 8.81 seconds to spawn the instance on the hypervisor. [ 890.963151] env[68674]: DEBUG nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.964316] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7636cd8-972e-4b04-bc4c-9b491834e962 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.975751] env[68674]: DEBUG nova.compute.provider_tree [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.277418] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 891.299020] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.299293] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.299450] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.299635] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.299784] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.299930] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.300150] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.300312] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
891.300489] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.300648] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.300821] env[68674]: DEBUG nova.virt.hardware [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.301688] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32236377-9817-4532-9f4b-c54f5be70056 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.310012] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c766326-9d43-4003-8670-83e07ae180b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.355176] env[68674]: DEBUG nova.compute.manager [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Received event network-changed-461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 891.355367] env[68674]: DEBUG nova.compute.manager [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Refreshing instance network info cache due to event network-changed-461dbfba-04af-43fe-b80d-c6c22078b46a. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 891.355601] env[68674]: DEBUG oslo_concurrency.lockutils [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] Acquiring lock "refresh_cache-f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.371576] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240264, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.409778] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 891.410244] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c6020666-2bd4-4ee1-b368-6ca62634cae5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.417676] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 891.417676] env[68674]: value = "task-3240265" [ 891.417676] env[68674]: _type = "Task" [ 891.417676] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.426425] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240265, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.443107] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e84db5bd-b6ec-42ef-9c34-a4160c44d973] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 891.449028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.449332] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Instance network_info: |[{"id": "461dbfba-04af-43fe-b80d-c6c22078b46a", "address": "fa:16:3e:4a:f7:50", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461dbfba-04", "ovs_interfaceid": "461dbfba-04af-43fe-b80d-c6c22078b46a", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.449657] env[68674]: DEBUG oslo_concurrency.lockutils [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] Acquired lock "refresh_cache-f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.449842] env[68674]: DEBUG nova.network.neutron [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Refreshing network info cache for port 461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.451079] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:f7:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '461dbfba-04af-43fe-b80d-c6c22078b46a', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.460833] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 891.464280] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.464731] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07eb1464-1b77-4834-abe5-d7f2629a557a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.481660] env[68674]: DEBUG nova.scheduler.client.report [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.494082] env[68674]: INFO nova.compute.manager [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Took 50.88 seconds to build instance. 
[ 891.496860] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.496860] env[68674]: value = "task-3240266" [ 891.496860] env[68674]: _type = "Task" [ 891.496860] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.512608] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240266, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.770186] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "245089a5-929d-49b0-aa36-749d342e8473" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.770526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "245089a5-929d-49b0-aa36-749d342e8473" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.872469] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240264, 'name': ReconfigVM_Task, 'duration_secs': 0.691704} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.875221] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.875567] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 891.929316] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240265, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.946390] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 58830b0e-dbf3-424d-8b9a-bb298b6bea21] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 891.993838] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.994506] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 891.998039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.679s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.998289] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.000485] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.883s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.000689] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.002366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.513s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.002600] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.004647] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.450s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.004878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.006630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.904s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.006894] env[68674]: DEBUG nova.objects.instance [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lazy-loading 'resources' on Instance uuid 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.008676] env[68674]: DEBUG oslo_concurrency.lockutils [None req-633e8ebb-0cf5-4d0b-82de-a2cbd8b25da4 tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "f147b483-9384-4fc1-996e-e8fb035c1942" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.213s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.019591] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240266, 'name': CreateVM_Task, 'duration_secs': 0.378168} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.027509] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.028600] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.028766] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.030134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.032508] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4ae19f1-f50f-4af5-b48a-86c788a62eaf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.038816] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 892.038816] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fcc9fa-87d7-b804-0bca-3bfd00fe7ca7" [ 892.038816] env[68674]: _type = "Task" [ 892.038816] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.040069] env[68674]: INFO nova.scheduler.client.report [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Deleted allocations for instance baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82 [ 892.046663] env[68674]: INFO nova.scheduler.client.report [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Deleted allocations for instance 6803af03-b1d5-47e6-9471-5213469e4103 [ 892.059060] env[68674]: INFO nova.scheduler.client.report [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted allocations for instance f3e7cacd-20d3-4dbe-89b0-80d89173069a [ 892.069407] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fcc9fa-87d7-b804-0bca-3bfd00fe7ca7, 'name': SearchDatastore_Task, 'duration_secs': 0.010905} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.070647] env[68674]: INFO nova.scheduler.client.report [None req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Deleted allocations for instance 3a0a7950-af31-4a20-a19d-44fbce8735a2 [ 892.071752] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.072272] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.072736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.073019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.074081] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.075784] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30d4735e-305b-4116-92c0-d975ba28972e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.088278] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.088889] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.089401] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b549048-aff7-4bb6-955c-f862e413ad42 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.095918] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 892.095918] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ecba5c-cf94-5855-23b9-74dffb8e1235" [ 892.095918] env[68674]: _type = "Task" [ 892.095918] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.105591] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ecba5c-cf94-5855-23b9-74dffb8e1235, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.274555] env[68674]: DEBUG nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 892.281501] env[68674]: DEBUG nova.network.neutron [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Updated VIF entry in instance network info cache for port 461dbfba-04af-43fe-b80d-c6c22078b46a. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.284037] env[68674]: DEBUG nova.network.neutron [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Updating instance_info_cache with network_info: [{"id": "461dbfba-04af-43fe-b80d-c6c22078b46a", "address": "fa:16:3e:4a:f7:50", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461dbfba-04", "ovs_interfaceid": "461dbfba-04af-43fe-b80d-c6c22078b46a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.284037] env[68674]: WARNING oslo_messaging._drivers.amqpdriver [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 892.295098] env[68674]: DEBUG nova.compute.manager [req-0b49bbb0-a719-4428-a9fc-10b0e7ef81e1 req-232833c4-05f0-44cb-8d79-22fa729d48fc service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Received event network-vif-plugged-b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.295098] env[68674]: DEBUG oslo_concurrency.lockutils [req-0b49bbb0-a719-4428-a9fc-10b0e7ef81e1 req-232833c4-05f0-44cb-8d79-22fa729d48fc service nova] Acquiring lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.295098] env[68674]: DEBUG oslo_concurrency.lockutils [req-0b49bbb0-a719-4428-a9fc-10b0e7ef81e1 req-232833c4-05f0-44cb-8d79-22fa729d48fc service nova] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.295687] env[68674]: DEBUG oslo_concurrency.lockutils [req-0b49bbb0-a719-4428-a9fc-10b0e7ef81e1 req-232833c4-05f0-44cb-8d79-22fa729d48fc service nova] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.295841] env[68674]: DEBUG nova.compute.manager [req-0b49bbb0-a719-4428-a9fc-10b0e7ef81e1 req-232833c4-05f0-44cb-8d79-22fa729d48fc service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] No waiting events found dispatching network-vif-plugged-b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.296120] env[68674]: WARNING nova.compute.manager [req-0b49bbb0-a719-4428-a9fc-10b0e7ef81e1 req-232833c4-05f0-44cb-8d79-22fa729d48fc service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Received unexpected event network-vif-plugged-b936c294-4b85-4fcd-9151-358b4db9d08d for instance with vm_state building and task_state spawning. 
[ 892.373953] env[68674]: DEBUG nova.network.neutron [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.384729] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716eab69-1b53-481c-ab4a-28fd07d78eae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.406189] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a2927f-ddbe-498d-a776-6b0476bf84ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.424405] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 892.439358] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240265, 'name': CreateSnapshot_Task, 'duration_secs': 0.733201} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.439358] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 892.439358] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4972f9ac-abd3-43ed-ac02-438541a9691a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.453024] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0d085a22-ef61-4ecd-a4c3-3be4b0fa6fae] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 892.511069] env[68674]: DEBUG nova.compute.utils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 892.515866] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 892.515866] env[68674]: DEBUG nova.network.neutron [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.557366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6790a86d-6cdc-45f5-b6ed-4e2474fc2346 tempest-ListImageFiltersTestJSON-774022952 tempest-ListImageFiltersTestJSON-774022952-project-member] Lock "6803af03-b1d5-47e6-9471-5213469e4103" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.316s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.562817] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ce4fdb8-498d-4a95-aa68-6844fa5a0d0e tempest-ServersAdminTestJSON-1774221260 tempest-ServersAdminTestJSON-1774221260-project-member] Lock "baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.709s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.569652] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c85eb1b3-03d1-4dc8-a507-8c579384834c tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "f3e7cacd-20d3-4dbe-89b0-80d89173069a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.890s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.581779] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-4075a884-e932-4551-ba25-d764dd3cbdba tempest-MultipleCreateTestJSON-1685112952 tempest-MultipleCreateTestJSON-1685112952-project-member] Lock "3a0a7950-af31-4a20-a19d-44fbce8735a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.785s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.611635] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ecba5c-cf94-5855-23b9-74dffb8e1235, 'name': SearchDatastore_Task, 'duration_secs': 0.009931} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.615128] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e28f5bd-3671-42ca-9fa7-a85641ba4fc4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.621502] env[68674]: DEBUG nova.policy [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 892.624847] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 892.624847] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d24e2-5576-1f10-cb58-d32e997cb4d3" [ 892.624847] env[68674]: _type = "Task" [ 892.624847] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.635874] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d24e2-5576-1f10-cb58-d32e997cb4d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.756823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "f147b483-9384-4fc1-996e-e8fb035c1942" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.757262] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "f147b483-9384-4fc1-996e-e8fb035c1942" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.757489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "f147b483-9384-4fc1-996e-e8fb035c1942-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.757680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "f147b483-9384-4fc1-996e-e8fb035c1942-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.757834] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "f147b483-9384-4fc1-996e-e8fb035c1942-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.759821] env[68674]: INFO nova.compute.manager [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Terminating instance [ 892.785677] env[68674]: DEBUG oslo_concurrency.lockutils [req-ac21fc56-f3fe-4a60-81f1-afa0631aabff req-54b51240-70a5-45f9-94d4-415eab28ec20 service nova] Releasing lock "refresh_cache-f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.800048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.876712] 
env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.876985] env[68674]: DEBUG nova.objects.instance [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'migration_context' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.963743] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 892.965523] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e162abe4-0150-4b4c-a3f0-1ac9691bfa98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.969706] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.969812] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Cleaning up deleted instances with incomplete migration {{(pid=68674) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 892.971562] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a352163-eccb-41f4-bdc7-73bdfd3e045a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.975638] env[68674]: DEBUG nova.network.neutron [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Successfully updated port: b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.985062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4cb399-1ce1-4b31-b9cd-38ba240d486d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.989802] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 892.989802] env[68674]: value = "task-3240267" [ 892.989802] env[68674]: _type = "Task" [ 892.989802] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.020149] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 893.024305] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5fb6a7-23ef-425a-b37b-b2687544f776 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.030483] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240267, 'name': CloneVM_Task} progress is 16%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.035809] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd2c20f-b598-4905-99ee-b2b405f08a07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.052511] env[68674]: DEBUG nova.compute.provider_tree [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.103240] env[68674]: DEBUG nova.network.neutron [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Port d24d2f1b-cc82-45a9-8d5c-94505a4de39f binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 893.139374] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524d24e2-5576-1f10-cb58-d32e997cb4d3, 'name': SearchDatastore_Task, 'duration_secs': 0.012522} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.139759] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.140059] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f6d28c5e-fe32-4c53-98ac-747a1b79e6c4/f6d28c5e-fe32-4c53-98ac-747a1b79e6c4.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.140383] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c9ce5e2-e96f-4a4b-8531-151f84388412 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.147320] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 893.147320] env[68674]: value = "task-3240268" [ 893.147320] env[68674]: _type = "Task" [ 893.147320] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.157225] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.265869] env[68674]: DEBUG nova.compute.manager [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 893.266321] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.267118] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429439ca-32aa-4498-957a-7b73ac57a20c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.279905] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.279973] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a32e8a1b-5061-4fe2-a902-6de5219f0aea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.287389] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 893.287389] env[68674]: value = "task-3240269" [ 893.287389] env[68674]: _type = "Task" [ 893.287389] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.299082] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240269, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.382957] env[68674]: DEBUG nova.objects.base [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Object Instance<3d85c8c4-f09c-4f75-aff5-9a49d84ae006> lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 893.384669] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f438c8b-c0ec-4435-8a12-87ede2dff19a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.422087] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dda994d-eabc-47ab-a6ee-935e9f94bc1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.426857] env[68674]: DEBUG nova.compute.manager [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Received event network-changed-b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 893.427030] env[68674]: DEBUG nova.compute.manager [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Refreshing instance network info cache due to event network-changed-b936c294-4b85-4fcd-9151-358b4db9d08d. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 893.427337] env[68674]: DEBUG oslo_concurrency.lockutils [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] Acquiring lock "refresh_cache-1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.427697] env[68674]: DEBUG oslo_concurrency.lockutils [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] Acquired lock "refresh_cache-1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.427903] env[68674]: DEBUG nova.network.neutron [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Refreshing network info cache for port b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.437629] env[68674]: DEBUG oslo_vmware.api [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 893.437629] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a566ef-7a27-dff4-1ffe-fc5ddeed3e56" [ 893.437629] env[68674]: _type = "Task" [ 893.437629] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.451341] env[68674]: DEBUG oslo_vmware.api [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a566ef-7a27-dff4-1ffe-fc5ddeed3e56, 'name': SearchDatastore_Task, 'duration_secs': 0.010637} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.451851] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.475868] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.477937] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "refresh_cache-1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.507690] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240267, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.559022] env[68674]: DEBUG nova.scheduler.client.report [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.604067] env[68674]: DEBUG nova.network.neutron [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Successfully created port: 7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.663721] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240268, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.798178] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240269, 'name': PowerOffVM_Task, 'duration_secs': 0.46272} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.798178] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.798178] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.798533] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44b60d2c-8ee2-4541-8caa-18b4d1211ca3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.870055] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.870055] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.870055] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleting the datastore file [datastore2] f147b483-9384-4fc1-996e-e8fb035c1942 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.870055] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f29a35b-c923-41cd-8ea4-d5fa39f52a16 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.875181] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for the task: (returnval){ [ 893.875181] env[68674]: value = "task-3240271" [ 893.875181] env[68674]: _type = "Task" [ 893.875181] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.886316] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.001664] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240267, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.026156] env[68674]: DEBUG nova.network.neutron [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.035996] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 894.067185] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 894.067185] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.067185] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.067801] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 894.069138] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.069407] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 894.069750] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 894.070038] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 894.070327] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 894.070644] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 894.070982] env[68674]: DEBUG nova.virt.hardware [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 894.071865] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.076349] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243c3690-1093-452d-bef9-be67443389eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.087021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.831s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.087021] env[68674]: DEBUG nova.objects.instance [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lazy-loading 'resources' on Instance uuid 95386cdb-c2e4-476a-8aaf-e10fdc40b591 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.092663] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50e0f4d-39b2-4199-a7ba-63a7756fc18b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.112347] env[68674]: INFO nova.scheduler.client.report [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Deleted allocations for instance 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca [ 894.133750] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.134284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.134642] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.161652] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558236} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.162082] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f6d28c5e-fe32-4c53-98ac-747a1b79e6c4/f6d28c5e-fe32-4c53-98ac-747a1b79e6c4.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.162486] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.162879] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41d283cf-90f9-4873-a5db-8e42258fba26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.170537] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 894.170537] env[68674]: value = "task-3240272" [ 894.170537] env[68674]: _type = "Task" [ 894.170537] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.181118] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240272, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.391508] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.462826] env[68674]: DEBUG nova.network.neutron [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.506276] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240267, 'name': CloneVM_Task, 'duration_secs': 1.404729} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.506276] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Created linked-clone VM from snapshot [ 894.506276] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7328342-1eb7-418a-8a0e-828fc5d15900 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.512055] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Uploading image 563ae351-528c-4f48-afe4-222e4f9dee21 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 894.540921] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 894.540921] env[68674]: value = "vm-647589" [ 894.540921] env[68674]: _type = "VirtualMachine" [ 894.540921] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 894.541939] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d3ce3f10-83ef-49ef-9ca2-d84cd1ff0c63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.552019] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease: (returnval){ [ 894.552019] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201a98e-70f6-3785-160f-ba1a392977f5" [ 894.552019] env[68674]: _type = "HttpNfcLease" [ 894.552019] env[68674]: } obtained for exporting VM: (result){ [ 894.552019] env[68674]: value = "vm-647589" [ 894.552019] env[68674]: _type = "VirtualMachine" [ 894.552019] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 894.552019] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the lease: (returnval){ [ 894.552019] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201a98e-70f6-3785-160f-ba1a392977f5" [ 894.552019] env[68674]: _type = "HttpNfcLease" [ 894.552019] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 894.563302] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 894.563302] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201a98e-70f6-3785-160f-ba1a392977f5" [ 894.563302] env[68674]: _type = "HttpNfcLease" [ 894.563302] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 894.642568] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f613c16b-e449-41e1-85cf-c9f1425c4245 tempest-SecurityGroupsTestJSON-1160302523 tempest-SecurityGroupsTestJSON-1160302523-project-member] Lock "3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.451s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.685327] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240272, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108542} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.685866] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.686959] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecb583b-f3b5-48b9-9caf-4eaea5120038 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.712660] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] f6d28c5e-fe32-4c53-98ac-747a1b79e6c4/f6d28c5e-fe32-4c53-98ac-747a1b79e6c4.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.715610] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-084a56e8-84db-45dc-a1a2-072255193d82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.736868] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 894.736868] env[68674]: value = "task-3240274" [ 894.736868] env[68674]: _type = "Task" [ 894.736868] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.745015] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240274, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.886716] env[68674]: DEBUG oslo_vmware.api [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Task: {'id': task-3240271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.584556} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.887265] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.887454] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.887635] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.887811] env[68674]: INFO nova.compute.manager [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Took 1.62 seconds to destroy the instance on the hypervisor. [ 894.888063] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.888627] env[68674]: DEBUG nova.compute.manager [-] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 894.888779] env[68674]: DEBUG nova.network.neutron [-] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.968161] env[68674]: DEBUG oslo_concurrency.lockutils [req-176e388d-43b8-4357-9560-615060dbefb2 req-e0f5c27d-d6b9-4969-bbdc-4aa64d2b0308 service nova] Releasing lock "refresh_cache-1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.969170] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired lock "refresh_cache-1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.969389] env[68674]: DEBUG nova.network.neutron [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.061849] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 895.061849] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201a98e-70f6-3785-160f-ba1a392977f5" [ 895.061849] env[68674]: _type = "HttpNfcLease" [ 895.061849] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 895.062243] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 895.062243] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5201a98e-70f6-3785-160f-ba1a392977f5" [ 895.062243] env[68674]: _type = "HttpNfcLease" [ 895.062243] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 895.062925] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19b2122-c3bc-46cb-804f-62e7e037fbd0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.073357] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e6c30f-299d-f2d0-33d3-6440846b0c29/disk-0.vmdk from lease info. 
{{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 895.073357] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e6c30f-299d-f2d0-33d3-6440846b0c29/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 895.142918] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3ee5be-d451-40a8-81b4-2f1dc1c9704b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.155122] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d29fa8-da62-414f-bfc5-bff3492d92b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.189984] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.190205] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.190382] env[68674]: DEBUG nova.network.neutron [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.194588] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffa3054-0be0-4b8b-805f-ba901a1e03d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.197587] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-eec1f36f-8bdc-4cb1-9410-a5019974e550 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.206853] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a415c1eb-b88a-48ad-aef9-a73a36be0d11 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.222874] env[68674]: DEBUG nova.compute.provider_tree [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.252763] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240274, 'name': ReconfigVM_Task, 'duration_secs': 0.327908} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.253483] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Reconfigured VM instance instance-0000004a to attach disk [datastore2] f6d28c5e-fe32-4c53-98ac-747a1b79e6c4/f6d28c5e-fe32-4c53-98ac-747a1b79e6c4.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.253661] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf418094-391e-49b1-8598-4ca52e4f34bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.260340] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 895.260340] env[68674]: value = "task-3240275" [ 895.260340] env[68674]: _type = "Task" [ 895.260340] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.272020] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240275, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.460142] env[68674]: DEBUG nova.compute.manager [req-4c832131-0d7a-4419-9b2a-3a261a9b85b8 req-a3636cb8-5eeb-47aa-948b-10577b1c85c9 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Received event network-vif-deleted-9353371a-f186-4ea6-a770-95c9d40d2340 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.460424] env[68674]: INFO nova.compute.manager [req-4c832131-0d7a-4419-9b2a-3a261a9b85b8 req-a3636cb8-5eeb-47aa-948b-10577b1c85c9 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Neutron deleted interface 9353371a-f186-4ea6-a770-95c9d40d2340; detaching it from the instance and deleting it from the info cache [ 895.460649] env[68674]: DEBUG nova.network.neutron [req-4c832131-0d7a-4419-9b2a-3a261a9b85b8 req-a3636cb8-5eeb-47aa-948b-10577b1c85c9 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.529033] env[68674]: DEBUG nova.network.neutron [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.730148] env[68674]: DEBUG nova.scheduler.client.report [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.731242] env[68674]: DEBUG nova.network.neutron [-] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.771296] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240275, 'name': Rename_Task, 'duration_secs': 0.153275} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.771296] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.771898] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a6c0dea-f5b6-4602-9d1e-018b06e2694c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.780724] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 895.780724] env[68674]: value = "task-3240276" [ 895.780724] env[68674]: _type = "Task" [ 895.780724] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.788119] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240276, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.847703] env[68674]: DEBUG nova.network.neutron [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Updating instance_info_cache with network_info: [{"id": "b936c294-4b85-4fcd-9151-358b4db9d08d", "address": "fa:16:3e:f6:db:66", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb936c294-4b", "ovs_interfaceid": "b936c294-4b85-4fcd-9151-358b4db9d08d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.964073] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70bb61a0-4a13-41d9-806f-a2d0c97654d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.979273] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c163cad-772e-4d6e-a79c-0f42193e66c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.029838] env[68674]: DEBUG nova.compute.manager [req-4c832131-0d7a-4419-9b2a-3a261a9b85b8 req-a3636cb8-5eeb-47aa-948b-10577b1c85c9 service nova] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Detach interface failed, port_id=9353371a-f186-4ea6-a770-95c9d40d2340, reason: Instance f147b483-9384-4fc1-996e-e8fb035c1942 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 896.234366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.237196] env[68674]: INFO nova.compute.manager [-] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Took 1.35 seconds to deallocate network for instance. 
[ 896.238148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 20.964s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.263870] env[68674]: INFO nova.scheduler.client.report [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted allocations for instance 95386cdb-c2e4-476a-8aaf-e10fdc40b591 [ 896.292212] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240276, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.352625] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Releasing lock "refresh_cache-1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.356022] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Instance network_info: |[{"id": "b936c294-4b85-4fcd-9151-358b4db9d08d", "address": "fa:16:3e:f6:db:66", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb936c294-4b", "ovs_interfaceid": "b936c294-4b85-4fcd-9151-358b4db9d08d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.356022] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:db:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b936c294-4b85-4fcd-9151-358b4db9d08d', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.362833] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Creating folder: Project (4fbaef606f1948db867cd3a0c5ff3692). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 896.363683] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb0a98d3-38fc-4ce3-a426-6114c5bd95a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.375738] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Created folder: Project (4fbaef606f1948db867cd3a0c5ff3692) in parent group-v647377. [ 896.375862] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Creating folder: Instances. Parent ref: group-v647590. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 896.376102] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5470ece9-dc49-48d6-8906-145fed00b423 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.382201] env[68674]: DEBUG nova.network.neutron [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.388325] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Created folder: Instances in parent group-v647590. 
[ 896.388325] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.390075] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.390275] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55cfa349-7b87-4fe8-abeb-35010ff24e48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.418322] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.418322] env[68674]: value = "task-3240279" [ 896.418322] env[68674]: _type = "Task" [ 896.418322] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.427484] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240279, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.607652] env[68674]: DEBUG nova.network.neutron [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Successfully updated port: 7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.753392] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.777745] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a36e8fd-fe07-4e35-a913-5b1b791be68a tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "95386cdb-c2e4-476a-8aaf-e10fdc40b591" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.983s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.794610] env[68674]: DEBUG oslo_vmware.api [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240276, 'name': PowerOnVM_Task, 'duration_secs': 0.684594} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.794985] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.795256] env[68674]: INFO nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Took 8.20 seconds to spawn the instance on the hypervisor. [ 896.795478] env[68674]: DEBUG nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.796532] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2172bc5-8fe1-46e1-94e8-6ea6c6c778c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.886678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.929859] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240279, 'name': CreateVM_Task, 'duration_secs': 0.421268} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.929859] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.930824] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.930906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.931921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 896.932334] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a75dcf7-14e5-47fc-8662-f505129501a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.942257] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 896.942257] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bda8a3-8824-dfc2-08b0-5f0c1c367733" [ 896.942257] env[68674]: _type = "Task" [ 896.942257] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.958032] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bda8a3-8824-dfc2-08b0-5f0c1c367733, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.114718] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.114875] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.115044] env[68674]: DEBUG nova.network.neutron [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.242548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543e824e-2385-46b0-ad79-2468813be49e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.251535] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97dff9a-af4a-4fc0-bddf-ff0dcc58bacd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.289095] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede581f3-5a94-43ba-a651-0b8284b7f935 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.298685] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2032bdf-ca74-4ab4-882a-d0ba81e0877e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.321159] env[68674]: DEBUG nova.compute.provider_tree [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.323969] env[68674]: INFO nova.compute.manager [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Took 49.39 seconds to build instance. 
[ 897.422135] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9959ad6a-0a7a-44f6-94b4-683eb388d87a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.448885] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76df41ae-44f8-4a2d-b15e-b2518890f318 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.470508] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bda8a3-8824-dfc2-08b0-5f0c1c367733, 'name': SearchDatastore_Task, 'duration_secs': 0.017328} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.470508] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 897.472418] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.472847] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.473204] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.473580] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.473896] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.474921] env[68674]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28639e02-3520-4454-9d29-030ce5300a4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.487024] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.487024] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.487024] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5763c385-0f67-4997-bc71-a0cfed1fa10b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.492277] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 897.492277] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b21e1c-7406-b948-b3e2-5141de419315" [ 897.492277] env[68674]: _type = "Task" [ 897.492277] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.501608] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b21e1c-7406-b948-b3e2-5141de419315, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.709460] env[68674]: DEBUG nova.network.neutron [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.714261] env[68674]: DEBUG nova.compute.manager [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-vif-plugged-7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.714261] env[68674]: DEBUG oslo_concurrency.lockutils [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.714261] env[68674]: DEBUG oslo_concurrency.lockutils [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.714261] env[68674]: DEBUG oslo_concurrency.lockutils [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.714261] env[68674]: DEBUG nova.compute.manager [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] No waiting events found dispatching network-vif-plugged-7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.714777] env[68674]: WARNING nova.compute.manager [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received unexpected event network-vif-plugged-7d9b4902-f03b-4046-b4ba-0bc1296918da for instance with vm_state building and task_state spawning. [ 897.715011] env[68674]: DEBUG nova.compute.manager [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-changed-7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.715179] env[68674]: DEBUG nova.compute.manager [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing instance network info cache due to event network-changed-7d9b4902-f03b-4046-b4ba-0bc1296918da. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 897.715352] env[68674]: DEBUG oslo_concurrency.lockutils [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.826752] env[68674]: DEBUG nova.scheduler.client.report [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.833158] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f20a910-8ab0-49fb-8268-0d75d182272a tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.808s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.957741] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.958070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.958333] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.958558] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.958787] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.965157] env[68674]: INFO nova.compute.manager [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Terminating instance [ 897.977444] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.978570] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22b1dd9a-4d36-461b-8788-e279765348b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.988148] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 897.988148] env[68674]: value = "task-3240280" [ 897.988148] env[68674]: _type = "Task" [ 897.988148] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.999542] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.010970] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b21e1c-7406-b948-b3e2-5141de419315, 'name': SearchDatastore_Task, 'duration_secs': 0.010283} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.011929] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37aa6533-e6d2-486e-9c61-5414b214b960 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.017650] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 898.017650] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5251cd33-ff5e-5e1c-2db8-b30c4a1fd7fa" [ 898.017650] env[68674]: _type = "Task" [ 898.017650] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.027371] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5251cd33-ff5e-5e1c-2db8-b30c4a1fd7fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.269900] env[68674]: DEBUG nova.network.neutron [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.471900] env[68674]: DEBUG nova.compute.manager [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 898.472763] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.473912] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82bae2f-b2e4-47b3-be9e-298a2026404e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.484426] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.484426] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e39f2aa-39ca-428f-bc35-6335e6b8fbbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.498959] env[68674]: DEBUG oslo_vmware.api [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 898.498959] env[68674]: value = "task-3240281" [ 898.498959] env[68674]: _type = "Task" [ 898.498959] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.507554] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240280, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.514892] env[68674]: DEBUG oslo_vmware.api [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.529059] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5251cd33-ff5e-5e1c-2db8-b30c4a1fd7fa, 'name': SearchDatastore_Task, 'duration_secs': 0.022748} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.529503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.529983] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3/1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.529983] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-781952dd-109d-482b-bfee-53321f58f866 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.537897] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 898.537897] env[68674]: value = "task-3240282" [ 898.537897] env[68674]: _type = "Task" [ 898.537897] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.548123] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240282, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.773875] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.774295] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Instance network_info: |[{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 898.774632] env[68674]: DEBUG oslo_concurrency.lockutils [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.774842] env[68674]: DEBUG nova.network.neutron [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing network info cache for port 7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.779022] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:ab:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4fe416-47a6-4542-b59d-8c71ab4d6503', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d9b4902-f03b-4046-b4ba-0bc1296918da', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.788697] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 
tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 898.792620] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.793316] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc93f764-babe-489a-8ecd-8282bb921fb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.828857] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.828857] env[68674]: value = "task-3240283" [ 898.828857] env[68674]: _type = "Task" [ 898.828857] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.842197] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240283, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.845238] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.607s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.848161] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.944s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.848588] env[68674]: DEBUG nova.objects.instance [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lazy-loading 'resources' on Instance uuid fa89e0b5-590d-43fb-bb11-02f8fdee0c2f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.916167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "f69c5fcf-6d25-48a5-a154-c3632c76175a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.916467] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.916787] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 
tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "f69c5fcf-6d25-48a5-a154-c3632c76175a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.917029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.917214] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.923583] env[68674]: INFO nova.compute.manager [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Terminating instance [ 899.006995] env[68674]: DEBUG oslo_vmware.api [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240280, 'name': PowerOnVM_Task, 'duration_secs': 0.62394} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.008065] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.008277] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3f7997-5edd-43ae-9447-2c02c120e857 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance '7aa58e2f-1202-4252-9c38-ce53084c573f' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 899.019635] env[68674]: DEBUG oslo_vmware.api [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240281, 'name': PowerOffVM_Task, 'duration_secs': 0.313729} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.021224] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.021701] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.021993] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9d780ad-fb14-40b8-8d82-088a89b3ef32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.054515] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240282, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.125931] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.130228] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.130228] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore2] f6d28c5e-fe32-4c53-98ac-747a1b79e6c4 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.130228] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2da9993-8e0e-4bd9-ad24-17c7bf85a554 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.135545] env[68674]: DEBUG oslo_vmware.api [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 899.135545] env[68674]: value = "task-3240285" [ 899.135545] env[68674]: _type = "Task" [ 899.135545] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.149037] env[68674]: DEBUG oslo_vmware.api [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240285, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.174330] env[68674]: DEBUG nova.network.neutron [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updated VIF entry in instance network info cache for port 7d9b4902-f03b-4046-b4ba-0bc1296918da. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.174732] env[68674]: DEBUG nova.network.neutron [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.340786] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240283, 'name': CreateVM_Task, 'duration_secs': 0.440038} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.340786] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.341368] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.341654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.342048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.342407] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e103519-2472-45c9-a298-2d72e32687f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.348729] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 899.348729] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231fd1e-6700-2f24-4f4f-4ba47ef404e1" [ 899.348729] env[68674]: _type = "Task" [ 899.348729] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.371124] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5231fd1e-6700-2f24-4f4f-4ba47ef404e1, 'name': SearchDatastore_Task, 'duration_secs': 0.018307} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.372066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.372066] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.372293] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.372722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.373085] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.373426] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1234a83f-85c4-4d7e-9496-6c6e87478021 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.385361] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.386403] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.386696] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fe64ae5-4aaf-4572-a13f-4860e8bbb21a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.395522] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 899.395522] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52724bdc-ec29-2b51-7a90-59b43b70bf91" [ 899.395522] env[68674]: _type = "Task" [ 899.395522] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.404275] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52724bdc-ec29-2b51-7a90-59b43b70bf91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.431893] env[68674]: DEBUG nova.compute.manager [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 899.432138] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.433738] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046adedf-d29d-47e7-a063-2821a19cae67 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.437505] env[68674]: INFO nova.scheduler.client.report [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocation for migration c843256d-6ec8-4075-9fbc-e7988cea5eb5 [ 899.443763] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.444349] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdee3bb2-1457-463b-ac1e-1824bf8251ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.452766] env[68674]: DEBUG oslo_vmware.api [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 899.452766] env[68674]: value = "task-3240286" [ 899.452766] env[68674]: _type = "Task" [ 899.452766] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.465370] env[68674]: DEBUG oslo_vmware.api [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.560070] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240282, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.641793} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.561237] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3/1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.561492] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.561859] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62b73f67-b095-4065-a66f-dead57cd6d1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.573702] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 899.573702] env[68674]: value = "task-3240287" [ 899.573702] env[68674]: _type = "Task" [ 899.573702] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.586731] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240287, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.650673] env[68674]: DEBUG oslo_vmware.api [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302162} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.651547] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.651547] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.651547] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.651547] env[68674]: INFO nova.compute.manager [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 899.651982] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.652479] env[68674]: DEBUG nova.compute.manager [-] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.652608] env[68674]: DEBUG nova.network.neutron [-] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 899.678615] env[68674]: DEBUG oslo_concurrency.lockutils [req-752744c1-3220-4c4a-beff-e2df754e0cfa req-b4a94588-e658-42bf-893c-4c02d7422444 service nova] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.887028] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6ae6c1-78c4-49c9-b67f-e34f9248df1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.908343] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c889764-7709-48b1-b89b-de20bb4b8339 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.916655] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52724bdc-ec29-2b51-7a90-59b43b70bf91, 'name': SearchDatastore_Task, 'duration_secs': 0.016007} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.949407] env[68674]: DEBUG oslo_concurrency.lockutils [None req-003a56b2-ef94-4371-ae20-1995599de147 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 28.266s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.951200] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0d5f48e-be89-42cb-8ee3-591a284f2886 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.957446] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66483081-edb0-4377-8789-728fa0f5f88c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.966103] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 899.966103] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5280a9c6-28f0-1d25-98ea-049d43dc2d8c" [ 899.966103] env[68674]: _type = "Task" [ 899.966103] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.970895] env[68674]: DEBUG oslo_vmware.api [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240286, 'name': PowerOffVM_Task, 'duration_secs': 0.272866} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.974836] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.974836] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.975935] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e7aa7b-74fc-4a16-897a-75cacc6147e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.982033] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0d76aa7-e62b-45b1-ba52-e67ae5d0ef8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.989385] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5280a9c6-28f0-1d25-98ea-049d43dc2d8c, 'name': SearchDatastore_Task, 'duration_secs': 0.01709} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.997532] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.997830] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0e7c5243-ad33-4391-8977-b9019643e3de/0e7c5243-ad33-4391-8977-b9019643e3de.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.998707] env[68674]: DEBUG nova.compute.provider_tree [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.003020] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02b8a357-574b-4de7-8239-c8ff097bea73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.007619] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 
tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 900.007619] env[68674]: value = "task-3240289" [ 900.007619] env[68674]: _type = "Task" [ 900.007619] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.017445] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240289, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.055862] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.055862] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.055862] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleting the datastore file [datastore2] f69c5fcf-6d25-48a5-a154-c3632c76175a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.056053] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3f2208a-4aca-4164-897c-e4aa80b9668a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.063388] env[68674]: DEBUG oslo_vmware.api [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 900.063388] env[68674]: value = "task-3240290" [ 900.063388] env[68674]: _type = "Task" [ 900.063388] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.072239] env[68674]: DEBUG oslo_vmware.api [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.091161] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072368} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.091313] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.092122] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0fe209-c34e-4328-8f53-e4b6a5be849b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.117044] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3/1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.117044] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-370e49ee-cc0c-41bf-8b46-4bb5ea3a0d93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.139099] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 900.139099] env[68674]: value = "task-3240291" [ 900.139099] env[68674]: _type = "Task" [ 900.139099] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.151609] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240291, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.186219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e684ec31-b5d9-458c-bbba-36ada7f275bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.186274] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.391106] env[68674]: DEBUG nova.compute.manager [req-88e003d0-8207-4306-b489-958f98dbc73b req-474cf2a1-14fd-4ebd-bad4-7529b0fc9434 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Received event network-vif-deleted-461dbfba-04af-43fe-b80d-c6c22078b46a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 900.391404] env[68674]: INFO nova.compute.manager [req-88e003d0-8207-4306-b489-958f98dbc73b req-474cf2a1-14fd-4ebd-bad4-7529b0fc9434 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Neutron deleted interface 461dbfba-04af-43fe-b80d-c6c22078b46a; detaching it from the instance and deleting it from the info cache [ 900.391404] env[68674]: DEBUG nova.network.neutron [req-88e003d0-8207-4306-b489-958f98dbc73b req-474cf2a1-14fd-4ebd-bad4-7529b0fc9434 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.504220] env[68674]: DEBUG nova.scheduler.client.report [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.522464] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240289, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.575174] env[68674]: DEBUG oslo_vmware.api [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335371} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.575499] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.575715] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.575914] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.576122] env[68674]: INFO nova.compute.manager [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 900.576401] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 900.576626] env[68674]: DEBUG nova.compute.manager [-] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 900.576743] env[68674]: DEBUG nova.network.neutron [-] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.635386] env[68674]: DEBUG nova.network.neutron [-] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.652446] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240291, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.692878] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 900.895334] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aecdf232-22aa-4716-a532-19cbcdbfb322 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.906767] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f04718a-044d-40a9-a48a-59a44b562ba1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.948778] env[68674]: DEBUG nova.compute.manager [req-88e003d0-8207-4306-b489-958f98dbc73b req-474cf2a1-14fd-4ebd-bad4-7529b0fc9434 service nova] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Detach interface failed, port_id=461dbfba-04af-43fe-b80d-c6c22078b46a, reason: Instance f6d28c5e-fe32-4c53-98ac-747a1b79e6c4 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 901.015218] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.020868] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.892s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.021188] env[68674]: DEBUG nova.objects.instance [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 901.025017] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240289, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.053720] env[68674]: INFO nova.scheduler.client.report [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Deleted allocations for instance fa89e0b5-590d-43fb-bb11-02f8fdee0c2f [ 901.139732] env[68674]: INFO nova.compute.manager [-] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Took 1.49 seconds to deallocate network for instance. [ 901.162418] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240291, 'name': ReconfigVM_Task, 'duration_secs': 0.67926} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.162418] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3/1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 901.163272] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49c1927e-70de-4c2b-9144-25e2136f79bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.174328] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 901.174328] env[68674]: value = "task-3240292" [ 901.174328] env[68674]: _type = "Task" [ 901.174328] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.192551] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240292, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.227514] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.525781] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240289, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.569961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6fecb7d-6bc4-41df-a8c2-456209a123c3 tempest-VolumesAdminNegativeTest-863754631 tempest-VolumesAdminNegativeTest-863754631-project-member] Lock "fa89e0b5-590d-43fb-bb11-02f8fdee0c2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.750s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.659045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.686730] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240292, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.715340] env[68674]: DEBUG nova.network.neutron [-] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.024022] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240289, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.781315} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.024022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0e7c5243-ad33-4391-8977-b9019643e3de/0e7c5243-ad33-4391-8977-b9019643e3de.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.024022] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.024022] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08acef42-5718-4ca1-ab62-57e8ddd338e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.032844] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1f696fa3-b419-482b-90ed-8eed9cb14b02 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.034914] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 902.034914] env[68674]: value = "task-3240293" [ 902.034914] env[68674]: _type = "Task" [ 902.034914] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.037137] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.914s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.037137] env[68674]: DEBUG nova.objects.instance [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lazy-loading 'resources' on Instance uuid 867fd9ca-049f-441a-94bc-af60df598043 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.060941] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240293, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.060941] env[68674]: DEBUG nova.network.neutron [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Port d24d2f1b-cc82-45a9-8d5c-94505a4de39f binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 902.060941] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.061132] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.061304] env[68674]: DEBUG nova.network.neutron [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.190905] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240292, 'name': Rename_Task, 'duration_secs': 0.85351} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.191327] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.191667] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7af691e-8380-45c7-8ed1-6de49f21e3a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.200035] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 902.200035] env[68674]: value = "task-3240294" [ 902.200035] env[68674]: _type = "Task" [ 902.200035] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.221569] env[68674]: INFO nova.compute.manager [-] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Took 1.64 seconds to deallocate network for instance. [ 902.222384] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240294, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.434377] env[68674]: DEBUG nova.compute.manager [req-f216a53a-fd69-4739-a6a1-1c26d436da4b req-0a2e2323-f6f7-4498-9e8c-559a06e33231 service nova] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Received event network-vif-deleted-9426039c-799a-4219-8e85-2ab029f56643 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.555522] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240293, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.169622} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.555936] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.558462] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b880f52-5484-4443-94a1-fc0adf698777 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.589894] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 0e7c5243-ad33-4391-8977-b9019643e3de/0e7c5243-ad33-4391-8977-b9019643e3de.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.593875] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bdd91a6-e865-4552-8603-ade2191a0722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.617238] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 902.617238] env[68674]: value = "task-3240295" [ 902.617238] env[68674]: _type = "Task" [ 902.617238] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.627328] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240295, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.716884] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.717287] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.717758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.718057] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.718333] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.723891] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240294, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.723891] env[68674]: INFO nova.compute.manager [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Terminating instance [ 902.730800] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.080346] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33e04d8-751a-4758-9a64-faafa21258de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.089148] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4c4d8e-ee72-4bd9-9463-37cf2a85e8bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.132631] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58917dfc-e6dc-4e65-abf0-2a4b2270c883 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.142228] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240295, 'name': ReconfigVM_Task, 'duration_secs': 0.381568} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.143958] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 0e7c5243-ad33-4391-8977-b9019643e3de/0e7c5243-ad33-4391-8977-b9019643e3de.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.146031] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cfed9f3-ab74-4145-8c26-38a9ace1c729 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.149070] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03dd55c2-f6d2-444b-8128-00f200a90763 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.164681] env[68674]: DEBUG nova.compute.provider_tree [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.167587] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 903.167587] env[68674]: value = "task-3240296" [ 903.167587] env[68674]: _type = "Task" [ 903.167587] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.180038] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240296, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.220027] env[68674]: DEBUG oslo_vmware.api [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240294, 'name': PowerOnVM_Task, 'duration_secs': 0.707398} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.220027] env[68674]: DEBUG nova.network.neutron [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.220027] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.220027] env[68674]: INFO nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Took 11.94 seconds to spawn the instance on the hypervisor. [ 903.220027] env[68674]: DEBUG nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.221062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4c6d61-189d-4977-948e-18ba0a42e652 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.226445] env[68674]: DEBUG nova.compute.manager [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.226653] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.230230] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8017d4ef-3f80-4a83-bc23-f8e59980e3be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.239679] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.239932] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b3b1f37-ac9a-4351-b1e4-eef0773839a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.246432] env[68674]: DEBUG oslo_vmware.api [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 903.246432] env[68674]: value = "task-3240297" [ 903.246432] env[68674]: _type = "Task" [ 903.246432] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.256050] env[68674]: DEBUG oslo_vmware.api [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240297, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.663231] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.663981] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.669128] env[68674]: DEBUG nova.scheduler.client.report [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.688830] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240296, 'name': Rename_Task, 'duration_secs': 0.19091} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.692977] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.693330] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-337ce614-09d4-4027-8851-202d82f1cd9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.706478] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 903.706478] env[68674]: value = "task-3240298" [ 903.706478] env[68674]: _type = "Task" [ 903.706478] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.718150] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240298, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.721810] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.755707] env[68674]: INFO nova.compute.manager [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Took 52.40 seconds to build instance. [ 903.760881] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e6c30f-299d-f2d0-33d3-6440846b0c29/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 903.761801] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11d7e82-e885-4906-afd1-3093df1105cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.769984] env[68674]: DEBUG oslo_vmware.api [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240297, 'name': PowerOffVM_Task, 'duration_secs': 0.245203} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.770967] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.770967] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.771116] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ceb3ce0a-4392-49ff-90c0-fd14043ac09c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.775015] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e6c30f-299d-f2d0-33d3-6440846b0c29/disk-0.vmdk is in state: ready. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 903.775191] env[68674]: ERROR oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e6c30f-299d-f2d0-33d3-6440846b0c29/disk-0.vmdk due to incomplete transfer. [ 903.775770] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-252450d4-8774-4eec-b85c-2bc17d285142 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.784596] env[68674]: DEBUG oslo_vmware.rw_handles [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e6c30f-299d-f2d0-33d3-6440846b0c29/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 903.784596] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Uploaded image 563ae351-528c-4f48-afe4-222e4f9dee21 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 903.787192] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 903.787470] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-92cde04c-e2a2-48df-a383-a32f3d834cd2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.794751] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 903.794751] env[68674]: value = "task-3240300" [ 903.794751] env[68674]: _type = "Task" [ 903.794751] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.806061] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240300, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.848920] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.849241] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.849365] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.852742] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5851372a-3dc1-43c6-96ca-5c7f75c8ee61 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.859578] env[68674]: DEBUG oslo_vmware.api [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 903.859578] env[68674]: value = "task-3240301" [ 903.859578] env[68674]: _type = "Task" [ 903.859578] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.871199] env[68674]: DEBUG oslo_vmware.api [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.166196] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 904.176212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.177951] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.029s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.179683] env[68674]: INFO nova.compute.claims [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.214679] env[68674]: INFO nova.scheduler.client.report [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleted allocations for instance 867fd9ca-049f-441a-94bc-af60df598043 [ 904.226017] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240298, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.226991] env[68674]: DEBUG nova.compute.manager [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68674) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 904.227230] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.261552] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c93e1913-2976-446b-8d7e-050d0f049fd3 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.617s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.308055] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240300, 'name': Destroy_Task, 'duration_secs': 0.433761} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.308366] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Destroyed the VM [ 904.308608] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 904.308869] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-10110719-a14b-4786-b002-9ea92f43668a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.315848] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 904.315848] env[68674]: value = "task-3240302" [ 904.315848] env[68674]: _type = "Task" [ 904.315848] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.325199] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240302, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.370410] env[68674]: DEBUG oslo_vmware.api [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442063} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.370782] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.371046] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.371296] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.371531] env[68674]: INFO nova.compute.manager [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Took 1.14 seconds to destroy the instance on the hypervisor. [ 904.371848] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.372142] env[68674]: DEBUG nova.compute.manager [-] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.372299] env[68674]: DEBUG nova.network.neutron [-] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.540163] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Acquiring lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.540521] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.541044] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Acquiring lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.541044] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.541157] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.546536] env[68674]: INFO nova.compute.manager [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Terminating instance [ 904.700460] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.724181] env[68674]: DEBUG oslo_vmware.api [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240298, 'name': PowerOnVM_Task, 'duration_secs': 0.616458} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.724181] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d15b7a1-2010-418c-961f-01c095492de9 tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "867fd9ca-049f-441a-94bc-af60df598043" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.002s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.724979] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.725197] env[68674]: INFO nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Took 10.69 seconds to spawn the instance on the hypervisor. [ 904.725373] env[68674]: DEBUG nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.726354] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14460e07-af0d-4833-b2b9-210f20586460 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.827064] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240302, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.051865] env[68674]: DEBUG nova.compute.manager [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 905.052112] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.053146] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33c2f86-879f-4c4d-9e83-8552bdd8ced0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.063870] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.063870] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd932ece-ddf5-4a4d-bf96-7b672e62dfc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.071023] env[68674]: DEBUG oslo_vmware.api [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Waiting for the task: (returnval){ [ 905.071023] env[68674]: value = "task-3240303" [ 905.071023] env[68674]: _type = "Task" [ 905.071023] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.077343] env[68674]: DEBUG oslo_vmware.api [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Task: {'id': task-3240303, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.158188] env[68674]: DEBUG nova.compute.manager [req-ea2b2cc6-1cb1-4656-a753-ffd9de21b2e1 req-0c800be4-fd87-464d-8c50-99c5f9770c63 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Received event network-vif-deleted-9f0aa506-1438-47ac-871c-632df3f943bf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.158631] env[68674]: INFO nova.compute.manager [req-ea2b2cc6-1cb1-4656-a753-ffd9de21b2e1 req-0c800be4-fd87-464d-8c50-99c5f9770c63 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Neutron deleted interface 9f0aa506-1438-47ac-871c-632df3f943bf; detaching it from the instance and deleting it from the info cache [ 905.158631] env[68674]: DEBUG nova.network.neutron [req-ea2b2cc6-1cb1-4656-a753-ffd9de21b2e1 req-0c800be4-fd87-464d-8c50-99c5f9770c63 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.253866] env[68674]: INFO nova.compute.manager [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Took 53.24 seconds to build instance. 
[ 905.299994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.300306] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.329766] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240302, 'name': RemoveSnapshot_Task, 'duration_secs': 0.811061} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.333620] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 905.334088] env[68674]: DEBUG nova.compute.manager [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.335805] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f1004-a10c-4400-8580-17dc6545f5f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.530881] env[68674]: DEBUG nova.network.neutron [-] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.579216] env[68674]: DEBUG oslo_vmware.api [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Task: {'id': task-3240303, 'name': PowerOffVM_Task, 'duration_secs': 0.279067} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.583318] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.583318] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.583318] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd3ed420-5cb6-449e-93e6-311e6b602d69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.655183] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.655418] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.655619] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Deleting the datastore file [datastore2] 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.656116] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fac28db1-37d0-4898-9da2-bc9b777ece34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.659385] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ea8fe2-4326-4d6b-bbac-62dd55a86c4f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.663217] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99264e14-c2c4-463a-a10e-fd7d38c4042e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.666537] env[68674]: DEBUG oslo_vmware.api [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Waiting for the task: (returnval){ [ 905.666537] env[68674]: value = "task-3240305" [ 905.666537] env[68674]: _type = "Task" [ 905.666537] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.674087] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94add6d-a313-44c5-b672-4e901247b828 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.681426] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bece7c3-238b-4efb-a6d0-aa5816b715be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.721212] env[68674]: DEBUG oslo_vmware.api [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Task: {'id': task-3240305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.722351] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5813c8a-759a-4498-bca5-a011120ea408 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.729394] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da463a48-fe63-441e-bb47-f63904e02528 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.746041] env[68674]: DEBUG nova.compute.manager [req-ea2b2cc6-1cb1-4656-a753-ffd9de21b2e1 req-0c800be4-fd87-464d-8c50-99c5f9770c63 service nova] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Detach interface failed, port_id=9f0aa506-1438-47ac-871c-632df3f943bf, reason: Instance e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 905.755959] env[68674]: DEBUG nova.compute.provider_tree [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.759154] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e536522f-4467-49d4-8238-0e717b3ff122 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "0e7c5243-ad33-4391-8977-b9019643e3de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.874s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.803201] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.851870] env[68674]: INFO nova.compute.manager [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Shelve offloading [ 905.904022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "040d7108-8da1-4914-b7fd-03cf09ec68aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.904022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "040d7108-8da1-4914-b7fd-03cf09ec68aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.904022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "040d7108-8da1-4914-b7fd-03cf09ec68aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.904022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "040d7108-8da1-4914-b7fd-03cf09ec68aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.904022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "040d7108-8da1-4914-b7fd-03cf09ec68aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.907271] env[68674]: INFO nova.compute.manager [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Terminating instance [ 906.034339] env[68674]: INFO nova.compute.manager [-] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Took 1.66 seconds to deallocate network for instance. [ 906.181109] env[68674]: DEBUG oslo_vmware.api [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Task: {'id': task-3240305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326516} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.181463] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.181714] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.181958] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.182216] env[68674]: INFO nova.compute.manager [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 906.182529] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 906.183074] env[68674]: DEBUG nova.compute.manager [-] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.183259] env[68674]: DEBUG nova.network.neutron [-] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.259137] env[68674]: DEBUG nova.scheduler.client.report [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.332251] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.357208] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.357598] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a73c088-03bc-4bc8-bab4-831778be029d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.369323] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 906.369323] env[68674]: value = "task-3240306" [ 906.369323] env[68674]: _type = "Task" [ 906.369323] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.380274] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 906.382744] env[68674]: DEBUG nova.compute.manager [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.382744] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a19079-502c-40d3-b394-ffedeacfdd82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.391202] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.392283] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.392478] env[68674]: DEBUG nova.network.neutron [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.414132] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "refresh_cache-040d7108-8da1-4914-b7fd-03cf09ec68aa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.415721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquired lock "refresh_cache-040d7108-8da1-4914-b7fd-03cf09ec68aa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.415721] env[68674]: DEBUG nova.network.neutron [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.543954] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.635082] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "33313b29-abaf-4ff7-9182-abfcfb9b3220" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.635356] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.766464] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.766984] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.772797] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.395s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.773051] env[68674]: DEBUG nova.objects.instance [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lazy-loading 'resources' on Instance uuid 036fbca7-be6a-43c6-972e-a71524833498 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.950388] env[68674]: DEBUG nova.network.neutron [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.068141] env[68674]: DEBUG nova.network.neutron [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.141202] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 907.153411] env[68674]: DEBUG nova.network.neutron [-] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.205710] env[68674]: DEBUG nova.compute.manager [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Received event network-vif-deleted-b936c294-4b85-4fcd-9151-358b4db9d08d {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.205998] env[68674]: DEBUG nova.compute.manager [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-changed-7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.206246] env[68674]: DEBUG nova.compute.manager [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing instance network info cache due to event network-changed-7d9b4902-f03b-4046-b4ba-0bc1296918da. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 907.206480] env[68674]: DEBUG oslo_concurrency.lockutils [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.206648] env[68674]: DEBUG oslo_concurrency.lockutils [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.206833] env[68674]: DEBUG nova.network.neutron [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing network info cache for port 7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.277847] env[68674]: DEBUG nova.compute.utils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 907.282439] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 907.282439] env[68674]: DEBUG nova.network.neutron [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.293358] env[68674]: DEBUG nova.network.neutron [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": "2cf52206-a0c1-4b57-886d-23df69181f20", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.384023] env[68674]: DEBUG nova.policy [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5080a4f68ef1482caaee5aa26614e6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c958fcb56a934ef7919b76aa2a193429', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 907.571832] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Releasing lock "refresh_cache-040d7108-8da1-4914-b7fd-03cf09ec68aa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.571832] env[68674]: DEBUG nova.compute.manager [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.571961] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.572953] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11e49a9-5e7b-41ed-ab67-19c493ed52c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.591587] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.591587] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ea9ddbf-b085-452a-a19b-70a16641eb20 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.598040] env[68674]: DEBUG oslo_vmware.api [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 907.598040] env[68674]: value = "task-3240307" [ 907.598040] env[68674]: _type = "Task" [ 907.598040] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.608452] env[68674]: DEBUG oslo_vmware.api [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.655393] env[68674]: INFO nova.compute.manager [-] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Took 1.47 seconds to deallocate network for instance. [ 907.666963] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.774290] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926f79aa-2d1b-41fd-9e0f-9ae84bf9b8ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.782662] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1ef6e5-911a-4527-98ea-3a0218f54de2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.787065] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.825871] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.831008] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8de9de8-729a-4eef-ada8-4cba6ed241f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.839307] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7da4f4-be2e-4ff1-a5a7-2e67c2f02373 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.853360] env[68674]: DEBUG nova.compute.provider_tree [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.108632] env[68674]: DEBUG oslo_vmware.api [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240307, 'name': PowerOffVM_Task, 'duration_secs': 0.161931} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.108918] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.109106] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.109374] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5052037b-b815-4158-96f4-8ce33e87a57b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.133295] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.133539] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.133770] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleting the datastore file [datastore2] 040d7108-8da1-4914-b7fd-03cf09ec68aa {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.134050] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e40e85f1-aad8-47a9-b7e1-61ab65636ae0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.146262] env[68674]: DEBUG oslo_vmware.api [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for the task: (returnval){ [ 908.146262] env[68674]: value = "task-3240309" [ 908.146262] env[68674]: _type = "Task" [ 908.146262] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.151975] env[68674]: DEBUG oslo_vmware.api [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240309, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.165490] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.365041] env[68674]: DEBUG nova.scheduler.client.report [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.565786] env[68674]: DEBUG nova.network.neutron [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Successfully created port: e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 908.658896] env[68674]: DEBUG oslo_vmware.api [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Task: {'id': task-3240309, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099629} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.659164] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.659340] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.659513] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.659679] env[68674]: INFO nova.compute.manager [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Took 1.09 seconds to destroy the instance on the hypervisor. 
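The "Waiting for the task ... progress is N% ... completed successfully" sequences above are oslo.vmware polling vCenter task objects until they finish. Below is a generic, hypothetical sketch of such a poll loop, not the oslo.vmware implementation; poll_task() and its {'state': ..., 'progress': ...} return shape are assumptions made purely for illustration:

    import time

    def wait_for_task(poll_task, interval=0.5):
        """Poll a task callable until it reports success or failure."""
        while True:
            info = poll_task()                  # one "progress is N%" line per poll
            if info['state'] == 'success':
                return info                     # the "completed successfully" case
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)

    # Usage with a fake task that finishes on its third poll:
    _calls = {'n': 0}
    def fake_poll():
        _calls['n'] += 1
        if _calls['n'] >= 3:
            return {'state': 'success'}
        return {'state': 'running', 'progress': 40 * _calls['n']}

    print(wait_for_task(fake_poll, interval=0.01))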
[ 908.659909] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.660113] env[68674]: DEBUG nova.compute.manager [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.660205] env[68674]: DEBUG nova.network.neutron [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.678211] env[68674]: DEBUG nova.network.neutron [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.693658] env[68674]: DEBUG nova.network.neutron [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updated VIF entry in instance network info cache for port 7d9b4902-f03b-4046-b4ba-0bc1296918da. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.694042] env[68674]: DEBUG nova.network.neutron [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.801445] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.835650] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.835732] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.835879] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.836076] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.836227] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 908.836373] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.836984] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 908.836984] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.836984] env[68674]: DEBUG nova.virt.hardware [None 
req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.837286] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.837286] env[68674]: DEBUG nova.virt.hardware [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.838392] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a7e9d5-ead5-43e6-9166-98ebf79c684b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.846475] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3e5848-8217-4ad8-8dbd-5205d48d40c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.873542] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.101s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.880510] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.080s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.882372] env[68674]: INFO nova.compute.claims [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.911509] env[68674]: INFO nova.scheduler.client.report [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Deleted allocations for instance 036fbca7-be6a-43c6-972e-a71524833498 [ 909.014148] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.015062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b088da-2939-4c2d-9bd3-d57845b0f164 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.024504] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.024861] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70ba271a-436e-410b-a742-c3a0cc598bd5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.100328] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.100928] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.101175] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleting the datastore file [datastore2] 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.101468] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2afc7b76-7588-450f-8a47-4d7dfb2c10ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.108794] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 909.108794] env[68674]: value = "task-3240311" [ 909.108794] env[68674]: _type = "Task" [ 909.108794] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.119214] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240311, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.181482] env[68674]: DEBUG nova.network.neutron [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.196573] env[68674]: DEBUG oslo_concurrency.lockutils [req-7ca6be3d-d678-4aca-a8e7-3314df2124db req-d53f6f32-3d94-4f60-ac57-bfdbbaa16074 service nova] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.357302] env[68674]: DEBUG nova.compute.manager [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-vif-unplugged-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.357534] env[68674]: DEBUG oslo_concurrency.lockutils [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.357752] env[68674]: DEBUG oslo_concurrency.lockutils [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.357928] env[68674]: DEBUG oslo_concurrency.lockutils [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.362227] env[68674]: DEBUG nova.compute.manager [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] No waiting events found dispatching network-vif-unplugged-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 909.362528] env[68674]: WARNING nova.compute.manager [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received unexpected event network-vif-unplugged-2cf52206-a0c1-4b57-886d-23df69181f20 for instance with vm_state shelved and task_state shelving_offloading. 
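The "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / ... released ... held N.NNNs" lines throughout this log come from oslo.concurrency's lockutils wrapper around named in-process locks. A minimal sketch of that usage pattern, assuming oslo.concurrency is installed; the lock name and function body here are illustrative and not taken from Nova:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('example-instance-uuid-events')
    def _pop_event():
        # Runs while the named lock is held; the DEBUG lines' "waited" and
        # "held" figures measure the wait for, and the time spent inside,
        # this critical section.
        pass

    _pop_event()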
[ 909.362789] env[68674]: DEBUG nova.compute.manager [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.363013] env[68674]: DEBUG nova.compute.manager [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing instance network info cache due to event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 909.363264] env[68674]: DEBUG oslo_concurrency.lockutils [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.363420] env[68674]: DEBUG oslo_concurrency.lockutils [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.363596] env[68674]: DEBUG nova.network.neutron [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.423888] env[68674]: DEBUG oslo_concurrency.lockutils [None req-631a7d69-2079-4f34-9281-5998e9322581 tempest-TenantUsagesTestJSON-408667991 tempest-TenantUsagesTestJSON-408667991-project-member] Lock "036fbca7-be6a-43c6-972e-a71524833498" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.843s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.623997] env[68674]: DEBUG oslo_vmware.api [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132809} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.625483] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.625677] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.626584] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.664204] env[68674]: INFO nova.scheduler.client.report [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted allocations for instance 63d6c185-db2c-4ede-a716-9a0dd432ab1f [ 909.684515] env[68674]: INFO nova.compute.manager [-] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Took 1.02 seconds to deallocate network for instance. [ 910.135691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.135990] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.136155] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.136268] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.136439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 
tempest-ServersTestJSON-808914722-project-member] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.141373] env[68674]: INFO nova.compute.manager [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Terminating instance [ 910.171267] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.191742] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.265193] env[68674]: DEBUG nova.network.neutron [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updated VIF entry in instance network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.265623] env[68674]: DEBUG nova.network.neutron [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2cf52206-a0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.300984] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.301236] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.371497] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24852c7d-61a5-4417-9fce-593db14fc457 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.380147] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef2441f-d6fa-45ae-ba56-9554ec63eb31 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.410208] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f80297-f66e-4f58-b0ae-a891bbd08dda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.417985] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ac2795-294e-48b6-8974-79809e8fb76b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.431641] env[68674]: DEBUG nova.compute.provider_tree [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.492354] env[68674]: DEBUG nova.network.neutron [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Successfully updated port: e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 910.652210] env[68674]: DEBUG nova.compute.manager [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.652452] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.653381] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0563c4ed-f2ea-456a-8576-c4e0acd4365a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.662110] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.662110] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dadb7c3-42dc-4dd0-bc87-bfb4f90a2825 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.668068] env[68674]: DEBUG oslo_vmware.api [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 910.668068] env[68674]: value = "task-3240312" [ 910.668068] env[68674]: _type = "Task" [ 910.668068] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.677383] env[68674]: DEBUG oslo_vmware.api [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3240312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.769300] env[68674]: DEBUG oslo_concurrency.lockutils [req-afa855e9-6e97-401b-8b4e-faed32f16765 req-c2a6122e-2091-4364-abc8-e21abe58b60c service nova] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.806729] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 910.935305] env[68674]: DEBUG nova.scheduler.client.report [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.996194] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.996327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.996478] env[68674]: DEBUG nova.network.neutron [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.178643] env[68674]: DEBUG oslo_vmware.api [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3240312, 'name': PowerOffVM_Task, 'duration_secs': 0.197963} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.179183] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.179183] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.179353] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-582e87fd-fc00-4567-ba6c-7b7b3549e5e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.241829] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.244028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.244028] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Deleting the datastore file [datastore1] 0eaf7d72-755b-4977-8f71-7d53ad1cf573 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.244028] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20f99239-8792-4e85-9703-58253728c674 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.250646] env[68674]: DEBUG oslo_vmware.api [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for the task: (returnval){ [ 911.250646] env[68674]: value = "task-3240314" [ 911.250646] env[68674]: _type = "Task" [ 911.250646] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.261392] env[68674]: DEBUG oslo_vmware.api [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3240314, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.333293] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.440201] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.440861] env[68674]: DEBUG nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.444780] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.993s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.495280] env[68674]: DEBUG nova.compute.manager [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Received event network-vif-plugged-e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.495495] env[68674]: DEBUG oslo_concurrency.lockutils [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.495690] env[68674]: DEBUG oslo_concurrency.lockutils [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.495850] env[68674]: DEBUG oslo_concurrency.lockutils [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.498760] env[68674]: DEBUG nova.compute.manager [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] 
No waiting events found dispatching network-vif-plugged-e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.498760] env[68674]: WARNING nova.compute.manager [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Received unexpected event network-vif-plugged-e6ae43a2-a24c-4187-aba3-c546140142b9 for instance with vm_state building and task_state spawning. [ 911.498760] env[68674]: DEBUG nova.compute.manager [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Received event network-changed-e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.498760] env[68674]: DEBUG nova.compute.manager [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Refreshing instance network info cache due to event network-changed-e6ae43a2-a24c-4187-aba3-c546140142b9. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 911.498973] env[68674]: DEBUG oslo_concurrency.lockutils [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] Acquiring lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.550257] env[68674]: DEBUG nova.network.neutron [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.767535] env[68674]: DEBUG oslo_vmware.api [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Task: {'id': task-3240314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157267} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.771020] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.771020] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.771020] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.771020] env[68674]: INFO nova.compute.manager [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Took 1.12 seconds to destroy the instance on the hypervisor. [ 911.771256] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.771331] env[68674]: DEBUG nova.compute.manager [-] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.771483] env[68674]: DEBUG nova.network.neutron [-] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.879903] env[68674]: DEBUG nova.network.neutron [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating instance_info_cache with network_info: [{"id": "e6ae43a2-a24c-4187-aba3-c546140142b9", "address": "fa:16:3e:e6:44:6b", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6ae43a2-a2", 
"ovs_interfaceid": "e6ae43a2-a24c-4187-aba3-c546140142b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.949198] env[68674]: DEBUG nova.compute.utils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.952767] env[68674]: DEBUG nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 912.388023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.388023] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance network_info: |[{"id": "e6ae43a2-a24c-4187-aba3-c546140142b9", "address": "fa:16:3e:e6:44:6b", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6ae43a2-a2", "ovs_interfaceid": "e6ae43a2-a24c-4187-aba3-c546140142b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 912.388023] env[68674]: DEBUG oslo_concurrency.lockutils [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] Acquired lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.388023] env[68674]: DEBUG nova.network.neutron [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Refreshing network info cache for port e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.388023] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:44:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6ae43a2-a24c-4187-aba3-c546140142b9', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.396161] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 912.398207] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 912.401215] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca375709-e336-4664-ba1f-8423f025edfc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.427990] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.427990] env[68674]: value = "task-3240315" [ 912.427990] env[68674]: _type = "Task" [ 912.427990] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.440738] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240315, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.455349] env[68674]: DEBUG nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.514558] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fcaed4-abcb-4e52-9855-b3f6c12961c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.524157] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59d336e-ba71-4d36-8380-c6242d42c92b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.565217] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9136f0-462d-4bb6-a077-09b44d3ad311 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.576077] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1390132b-867e-407b-b542-24874793c955 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.594325] env[68674]: DEBUG nova.compute.provider_tree [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.632172] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "e894cd36-95c8-473b-9bbd-483f11fb5add" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.632978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.939988] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240315, 'name': CreateVM_Task, 'duration_secs': 0.35628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.940929] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.941878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.941878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.941878] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.942283] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-635fd518-d185-4405-9b0d-701bfccc7191 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.948090] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 912.948090] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a74a87-e6a6-e139-e394-5cff64b7afdc" [ 912.948090] env[68674]: _type = "Task" [ 912.948090] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.960755] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a74a87-e6a6-e139-e394-5cff64b7afdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.083569] env[68674]: DEBUG nova.network.neutron [-] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.102216] env[68674]: DEBUG nova.scheduler.client.report [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.132679] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.135110] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 913.153202] env[68674]: DEBUG nova.network.neutron [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updated VIF entry in instance network info cache for port e6ae43a2-a24c-4187-aba3-c546140142b9. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.153546] env[68674]: DEBUG nova.network.neutron [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating instance_info_cache with network_info: [{"id": "e6ae43a2-a24c-4187-aba3-c546140142b9", "address": "fa:16:3e:e6:44:6b", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6ae43a2-a2", "ovs_interfaceid": "e6ae43a2-a24c-4187-aba3-c546140142b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.463265] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a74a87-e6a6-e139-e394-5cff64b7afdc, 'name': SearchDatastore_Task, 'duration_secs': 0.027802} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.463265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.464492] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.465527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.465527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.465527] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 913.466060] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4e311fc-3845-4126-a4dd-30d3b89231c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.471234] env[68674]: DEBUG nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.476713] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 913.476713] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 913.478957] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26cc7760-48df-4f52-aa9b-9a2a22aa6a27 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.484106] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 913.484106] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b95fba-9991-369c-a25d-7f3b79ae4a7f" [ 913.484106] env[68674]: _type = "Task" [ 913.484106] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.493060] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b95fba-9991-369c-a25d-7f3b79ae4a7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.511747] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.512194] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.512408] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.512820] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.513272] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Image pref 0:0:0 
{{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.515019] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.515019] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.515019] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.515019] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.515019] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.515019] env[68674]: DEBUG nova.virt.hardware [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.515949] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56739932-d62c-4e94-973b-955138b0239a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.526602] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f9ba8b-0620-468e-a586-b83dc873cd24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.539800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.545819] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Creating folder: Project (47528bb3f30f4831aa7c9f4392f16ee2). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.546176] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01f48915-fe6c-4a3d-9604-f3074d6fad51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.556855] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Created folder: Project (47528bb3f30f4831aa7c9f4392f16ee2) in parent group-v647377. [ 913.556987] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Creating folder: Instances. Parent ref: group-v647595. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.557255] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96830f73-2831-4fd1-814a-55666d511ec9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.563651] env[68674]: DEBUG nova.compute.manager [req-63a95697-f0f6-4521-a567-1930eb938bf0 req-7010d403-6a88-4d08-9717-7464eeebdb5d service nova] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Received event network-vif-deleted-6ef03ee8-7859-4976-be77-54e193e997a1 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.567430] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Created folder: Instances in parent group-v647595. [ 913.567656] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.567885] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.568047] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7ef848e-153c-411b-acc4-ed0bb865af29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.586417] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.586417] env[68674]: value = "task-3240318" [ 913.586417] env[68674]: _type = "Task" [ 913.586417] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.587168] env[68674]: INFO nova.compute.manager [-] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Took 1.82 seconds to deallocate network for instance. [ 913.609539] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240318, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.657311] env[68674]: DEBUG oslo_concurrency.lockutils [req-6c4cecf3-8bef-4e2f-9198-3e5b2db5a50f req-f881807e-e926-497f-b80a-a964a63bba07 service nova] Releasing lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.668309] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.999398] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b95fba-9991-369c-a25d-7f3b79ae4a7f, 'name': SearchDatastore_Task, 'duration_secs': 0.0214} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.001810] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9694c84-870b-4b5c-aa87-1d60d2a19994 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.009999] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 914.009999] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52706c88-bb27-0038-db53-4087ca1826df" [ 914.009999] env[68674]: _type = "Task" [ 914.009999] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.020222] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52706c88-bb27-0038-db53-4087ca1826df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.103069] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.103807] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240318, 'name': CreateVM_Task, 'duration_secs': 0.388137} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.104916] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.105394] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.105973] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.106619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.106619] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dae5f74-13ed-49fd-be29-5dbf4112a740 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.113285] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.668s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.113285] env[68674]: DEBUG nova.compute.manager [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=68674) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 914.119323] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 914.119323] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525c3201-dec8-8a7d-757d-e57ad3042560" [ 914.119323] env[68674]: _type = "Task" [ 914.119323] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.119812] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.367s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.120039] env[68674]: DEBUG nova.objects.instance [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lazy-loading 'resources' on Instance uuid f147b483-9384-4fc1-996e-e8fb035c1942 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.132858] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525c3201-dec8-8a7d-757d-e57ad3042560, 'name': SearchDatastore_Task, 'duration_secs': 0.008913} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.134026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.134138] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.134651] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.525360] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52706c88-bb27-0038-db53-4087ca1826df, 'name': SearchDatastore_Task, 'duration_secs': 0.014214} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.525681] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.525901] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 914.526208] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.526395] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.526634] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe470ff8-09c6-49d2-aa14-215ccd372a13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.528864] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34f1aa33-838b-45af-b572-78b32637f4c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.536291] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 914.536291] env[68674]: value = "task-3240319" [ 914.536291] env[68674]: _type = "Task" [ 914.536291] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.540228] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.540424] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.545163] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca726297-4424-408b-8454-809774527f1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.550863] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.553980] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 914.553980] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5285306a-c13d-a5c2-65ee-681bbcc8cb11" [ 914.553980] env[68674]: _type = "Task" [ 914.553980] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.562239] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5285306a-c13d-a5c2-65ee-681bbcc8cb11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.715281] env[68674]: INFO nova.scheduler.client.report [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted allocation for migration dbcb4b5d-eb95-4807-bf85-9adfb2b1f475 [ 915.051244] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47052} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.052032] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 915.052032] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.052032] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ae0f0cb-6611-4cdf-922b-2aacf3937030 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.061322] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 915.061322] env[68674]: value = "task-3240320" [ 915.061322] env[68674]: _type = "Task" [ 915.061322] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.069663] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5285306a-c13d-a5c2-65ee-681bbcc8cb11, 'name': SearchDatastore_Task, 'duration_secs': 0.00881} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.071328] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54d6a021-4cb3-4e36-8889-94d035b1dbb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.081137] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.086771] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 915.086771] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a841fb-5635-dcf8-4cf8-b7a8eca09ab3" [ 915.086771] env[68674]: _type = "Task" [ 915.086771] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.094268] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a841fb-5635-dcf8-4cf8-b7a8eca09ab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.109861] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c9213f-ea94-421d-8d3f-3d17657c71c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.118146] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a514d8c4-f502-42bb-85a0-d95d6cadc7e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.155982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42404071-50cd-44d0-a3e2-92e2aa8fd78d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.165616] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e85c54-8815-4e61-bd59-0f4d1d0ddf65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.180953] env[68674]: DEBUG nova.compute.provider_tree [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.227064] env[68674]: DEBUG oslo_concurrency.lockutils [None req-94bf0518-d032-462c-99ff-8ed7f0ae2ed4 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 25.144s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.248050] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.248671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock 
"a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.571631] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125044} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.571973] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.572562] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58db1f47-2b3d-4212-804b-a0afcd50c883 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.594345] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.594704] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0a4e5cb-41dd-4312-b971-de76926f6a2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.622425] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a841fb-5635-dcf8-4cf8-b7a8eca09ab3, 'name': SearchDatastore_Task, 'duration_secs': 0.009582} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.623715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.623989] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.624558] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 915.624558] env[68674]: value = "task-3240321" [ 915.624558] env[68674]: _type = "Task" [ 915.624558] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.624756] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dea0043-d286-4d30-98a6-b992efbc9d18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.635385] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240321, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.637442] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 915.637442] env[68674]: value = "task-3240322" [ 915.637442] env[68674]: _type = "Task" [ 915.637442] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.647228] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240322, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.706028] env[68674]: ERROR nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] [req-d3537f9d-0c87-42db-a254-64c4c7aca862] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d3537f9d-0c87-42db-a254-64c4c7aca862"}]} [ 915.723674] env[68674]: DEBUG nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 915.744395] env[68674]: DEBUG nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 915.744646] env[68674]: DEBUG nova.compute.provider_tree [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.750137] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 915.757284] env[68674]: DEBUG nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 915.777878] env[68674]: DEBUG nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 915.861920] env[68674]: DEBUG nova.objects.instance [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'flavor' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.118400] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.118682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.120865] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.120865] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.120865] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 
tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.125793] env[68674]: INFO nova.compute.manager [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Terminating instance [ 916.141548] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240321, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.157583] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473105} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.158138] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.158422] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.158740] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dc5e435-3fcf-477d-923e-bb05bde124ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.165759] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 916.165759] env[68674]: value = "task-3240323" [ 916.165759] env[68674]: _type = "Task" [ 916.165759] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.178385] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240323, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.242035] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3794e0-f537-44f1-a9c5-8fe285e74a84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.250144] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee30f59f-40a6-4f96-aff6-3174525ce1d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.284798] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.286205] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d699c243-ee99-4d2f-aa62-af5c34b095d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.295175] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79cb9ca-a782-41af-af86-2ad5831cd708 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.309513] env[68674]: DEBUG nova.compute.provider_tree [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 916.366514] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.366637] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.366813] env[68674]: DEBUG nova.network.neutron [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.366991] env[68674]: DEBUG nova.objects.instance [None 
req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'info_cache' on Instance uuid 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.637557] env[68674]: DEBUG nova.compute.manager [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 916.637887] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 916.638217] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240321, 'name': ReconfigVM_Task, 'duration_secs': 0.730627} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.638925] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2e8b0a-4be2-415e-9114-bc94fd9b26b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.641444] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.642370] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86cd5c8d-802c-45d3-8bd5-f0713c25f189 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.647574] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 916.648572] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e1157cf-7a4a-44f3-b3bc-c535422ee06a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.650034] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 916.650034] env[68674]: value = "task-3240324" [ 916.650034] env[68674]: _type = "Task" [ 916.650034] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.655098] env[68674]: DEBUG oslo_vmware.api [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 916.655098] env[68674]: value = "task-3240325" [ 916.655098] env[68674]: _type = "Task" [ 916.655098] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.660761] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240324, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.665549] env[68674]: DEBUG oslo_vmware.api [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240325, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.675873] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064147} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.676164] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.676940] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46687df3-754e-40eb-879f-3d6d4a7506c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.698635] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.698940] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70c16e39-31ad-475c-8962-c909ce7a6429 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.718143] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 916.718143] env[68674]: value = "task-3240326" [ 916.718143] env[68674]: _type = "Task" [ 916.718143] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.726843] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240326, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.847367] env[68674]: DEBUG nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 916.847624] env[68674]: DEBUG nova.compute.provider_tree [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 119 to 120 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 916.847815] env[68674]: DEBUG nova.compute.provider_tree [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 916.870374] env[68674]: DEBUG nova.objects.base [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Object Instance<3d85c8c4-f09c-4f75-aff5-9a49d84ae006> lazy-loaded attributes: flavor,info_cache {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 917.162071] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240324, 'name': Rename_Task, 'duration_secs': 0.203762} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.162773] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.163070] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04b51dc8-8e04-4523-9209-c1bd1768b83b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.167169] env[68674]: DEBUG oslo_vmware.api [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240325, 'name': PowerOffVM_Task, 'duration_secs': 0.244245} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.168025] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.168025] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.168222] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4d927f0-b2c7-477c-b064-f4590aefca43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.171979] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 917.171979] env[68674]: value = "task-3240327" [ 917.171979] env[68674]: _type = "Task" [ 917.171979] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.180866] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240327, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.229209] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240326, 'name': ReconfigVM_Task, 'duration_secs': 0.309268} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.229507] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.230145] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ae9de0b-3a67-4177-97ec-edb6c60e7278 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.236814] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 917.236814] env[68674]: value = "task-3240329" [ 917.236814] env[68674]: _type = "Task" [ 917.236814] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.245239] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240329, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.353623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.234s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.356235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.129s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.357857] env[68674]: INFO nova.compute.claims [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.379441] env[68674]: INFO nova.scheduler.client.report [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Deleted allocations for instance f147b483-9384-4fc1-996e-e8fb035c1942 [ 917.649768] env[68674]: DEBUG nova.network.neutron [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [{"id": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "address": "fa:16:3e:4f:37:80", "network": 
{"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b7ed5a9-21", "ovs_interfaceid": "1b7ed5a9-214f-4011-b73e-63954c02e25e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.684348] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240327, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.747134] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240329, 'name': Rename_Task, 'duration_secs': 0.144454} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.747359] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.747606] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7377ba55-7168-4512-8ca8-e726bd39ede8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.754108] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 917.754108] env[68674]: value = "task-3240330" [ 917.754108] env[68674]: _type = "Task" [ 917.754108] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.761603] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240330, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.887998] env[68674]: DEBUG oslo_concurrency.lockutils [None req-29162cd7-91a1-4388-8d93-c7c58b23151d tempest-ImagesOneServerNegativeTestJSON-1416053422 tempest-ImagesOneServerNegativeTestJSON-1416053422-project-member] Lock "f147b483-9384-4fc1-996e-e8fb035c1942" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.130s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.152052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-3d85c8c4-f09c-4f75-aff5-9a49d84ae006" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.183875] env[68674]: DEBUG oslo_vmware.api [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240327, 'name': PowerOnVM_Task, 'duration_secs': 0.619113} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.183875] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.183875] env[68674]: INFO nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Took 9.38 seconds to spawn the instance on the hypervisor. [ 918.184105] env[68674]: DEBUG nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.184852] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb15eed-c7ea-4081-b9f3-c76a5b24ab2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.263772] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240330, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.706633] env[68674]: INFO nova.compute.manager [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Took 30.58 seconds to build instance. 
[ 918.723699] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.724072] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.724182] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Deleting the datastore file [datastore1] c4fd04a7-2b11-4c4b-84d1-53edc1e3f035 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.724773] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94169c84-3e59-4540-87bc-8142f9c0881b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.731439] env[68674]: DEBUG oslo_vmware.api [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for the task: (returnval){ [ 918.731439] env[68674]: value = "task-3240331" [ 918.731439] env[68674]: _type = "Task" [ 918.731439] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.744264] env[68674]: DEBUG oslo_vmware.api [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.764178] env[68674]: DEBUG oslo_vmware.api [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240330, 'name': PowerOnVM_Task, 'duration_secs': 0.549226} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.764311] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.764414] env[68674]: INFO nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Took 5.29 seconds to spawn the instance on the hypervisor. 
[ 918.764670] env[68674]: DEBUG nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.765398] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7d623b-a6e1-4ae5-8315-47e9e4752efc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.769147] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcfc09d-7820-4b0c-812c-9955a054a090 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.780357] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea128f80-f458-470e-9e4f-5d4fb5fb6a17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.814025] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f15fd9-1e74-4f72-ba6c-c938144eef9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.822204] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9718e190-a767-44c0-9e6a-70eefad52eed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.836624] env[68674]: DEBUG nova.compute.provider_tree [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.158873] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.159127] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdc45eea-26c9-4279-a18f-feadea0b7cfb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.166406] env[68674]: DEBUG oslo_vmware.api [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 919.166406] env[68674]: value = "task-3240332" [ 919.166406] env[68674]: _type = "Task" [ 919.166406] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.174431] env[68674]: DEBUG oslo_vmware.api [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240332, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.209203] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6702c8e0-f974-4a82-ba95-c0adaefea384 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.094s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.242300] env[68674]: DEBUG oslo_vmware.api [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Task: {'id': task-3240331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285874} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.242564] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 919.242800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 919.242988] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.243176] env[68674]: INFO nova.compute.manager [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Took 2.61 seconds to destroy the instance on the hypervisor. [ 919.243450] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 919.243595] env[68674]: DEBUG nova.compute.manager [-] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 919.243707] env[68674]: DEBUG nova.network.neutron [-] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.307769] env[68674]: INFO nova.compute.manager [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Took 26.52 seconds to build instance. [ 919.339599] env[68674]: DEBUG nova.scheduler.client.report [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.676172] env[68674]: DEBUG oslo_vmware.api [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240332, 'name': PowerOnVM_Task, 'duration_secs': 0.435927} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.676537] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.676795] env[68674]: DEBUG nova.compute.manager [None req-04d69c16-5651-4524-95d4-a76c1e8a278f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 919.677830] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabe90e8-ef81-4a31-aaa4-11800538bd8b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.806882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8cd595cd-d460-4992-b758-e996aa8562f5 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "245089a5-929d-49b0-aa36-749d342e8473" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.036s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.844620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.845314] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 919.848280] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.190s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.848672] env[68674]: DEBUG nova.objects.instance [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid f6d28c5e-fe32-4c53-98ac-747a1b79e6c4 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.284731] env[68674]: INFO nova.compute.manager [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Rebuilding instance [ 920.329193] env[68674]: DEBUG nova.compute.manager [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Received event network-changed-e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.329193] env[68674]: DEBUG nova.compute.manager [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Refreshing instance network info cache due to event network-changed-e6ae43a2-a24c-4187-aba3-c546140142b9. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 920.329193] env[68674]: DEBUG oslo_concurrency.lockutils [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] Acquiring lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.329193] env[68674]: DEBUG oslo_concurrency.lockutils [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] Acquired lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.329193] env[68674]: DEBUG nova.network.neutron [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Refreshing network info cache for port e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 920.349058] env[68674]: DEBUG nova.compute.manager [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.353032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc1e92b-3c3c-4cf0-a7b5-fcbbaa64f991 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.357568] env[68674]: DEBUG nova.compute.utils [None 
req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 920.359424] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 920.359706] env[68674]: DEBUG nova.network.neutron [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 920.370999] env[68674]: DEBUG nova.compute.manager [req-72728b4e-da79-435d-918a-7ac06f2e5ced req-eec74105-887c-4f17-88bd-169c0adc9180 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Received event network-vif-deleted-4b49219f-9f62-4839-b5a2-eb1116da215c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.371711] env[68674]: INFO nova.compute.manager [req-72728b4e-da79-435d-918a-7ac06f2e5ced req-eec74105-887c-4f17-88bd-169c0adc9180 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Neutron deleted interface 4b49219f-9f62-4839-b5a2-eb1116da215c; detaching it from the instance and deleting it from the info cache [ 920.372071] env[68674]: DEBUG nova.network.neutron [req-72728b4e-da79-435d-918a-7ac06f2e5ced req-eec74105-887c-4f17-88bd-169c0adc9180 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.442512] env[68674]: DEBUG nova.policy [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1891413e35f845a2b761f474df3eb6c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dceab4b22c34737bc85ee5a5ded00d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 920.777700] env[68674]: DEBUG nova.network.neutron [-] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.829531] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.829864] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 
tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.830095] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.830292] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.830462] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.834097] env[68674]: INFO nova.compute.manager [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Terminating instance [ 920.843502] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952006fa-10b7-4c3a-bb1d-76a10e5a0f05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.852954] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e782135e-14b6-4782-be9c-c7e797ad95f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.885224] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 920.897348] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db060b13-9e7b-44af-b6f5-7758e79b02ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.902021] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fe372e-dbe8-40cf-8c6f-4152395e760d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.916868] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533f091e-fbed-471f-8ce5-6a040c9405e6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.923870] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035d4b5d-4f5f-40c5-9508-eb1af03d1cac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.950873] env[68674]: DEBUG nova.compute.provider_tree [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.962055] env[68674]: DEBUG nova.compute.manager [req-72728b4e-da79-435d-918a-7ac06f2e5ced req-eec74105-887c-4f17-88bd-169c0adc9180 service nova] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Detach interface failed, port_id=4b49219f-9f62-4839-b5a2-eb1116da215c, reason: Instance c4fd04a7-2b11-4c4b-84d1-53edc1e3f035 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 920.963224] env[68674]: DEBUG nova.scheduler.client.report [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.011085] env[68674]: DEBUG nova.network.neutron [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Successfully created port: f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 921.234312] env[68674]: DEBUG nova.network.neutron [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updated VIF entry in instance network info cache for port e6ae43a2-a24c-4187-aba3-c546140142b9. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.234966] env[68674]: DEBUG nova.network.neutron [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating instance_info_cache with network_info: [{"id": "e6ae43a2-a24c-4187-aba3-c546140142b9", "address": "fa:16:3e:e6:44:6b", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6ae43a2-a2", "ovs_interfaceid": "e6ae43a2-a24c-4187-aba3-c546140142b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.280439] env[68674]: INFO nova.compute.manager [-] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Took 2.04 seconds to deallocate network for instance. [ 921.339803] env[68674]: DEBUG nova.compute.manager [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.340102] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.341333] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3e036f-7232-4bc2-8a49-cae2fd473742 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.350085] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.350345] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-743a0af7-7952-4625-878b-e5b6cc9091f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.356980] env[68674]: DEBUG oslo_vmware.api [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 921.356980] env[68674]: value = "task-3240333" [ 921.356980] env[68674]: _type = "Task" [ 921.356980] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.365449] env[68674]: DEBUG oslo_vmware.api [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240333, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.401241] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.401910] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f971c3c1-cf75-4d9a-b58b-5add0b8c11f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.409582] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 921.409582] env[68674]: value = "task-3240334" [ 921.409582] env[68674]: _type = "Task" [ 921.409582] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.420728] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240334, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.471613] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.473816] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.743s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.474257] env[68674]: DEBUG nova.objects.instance [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lazy-loading 'resources' on Instance uuid f69c5fcf-6d25-48a5-a154-c3632c76175a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.501269] env[68674]: INFO nova.scheduler.client.report [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance f6d28c5e-fe32-4c53-98ac-747a1b79e6c4 [ 921.737257] env[68674]: DEBUG oslo_concurrency.lockutils [req-17e7c8a8-b307-4ffb-b785-ae53c9215ff2 req-2d2dabaa-236d-4cf2-a3b8-8cb3bcdd4dab service nova] Releasing lock "refresh_cache-2d02adff-9fbf-4889-99e4-4efde5a51b33" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.788680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.867154] env[68674]: DEBUG oslo_vmware.api [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240333, 'name': PowerOffVM_Task, 'duration_secs': 0.218003} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.867471] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.867644] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.867896] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f0144b7-bb76-4f68-aa8b-bf4c1c3574fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.906039] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 921.918777] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240334, 'name': PowerOffVM_Task, 'duration_secs': 0.145363} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.919109] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.919911] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.920642] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee6927-a1b5-4b31-8b03-bbf1b05f6c84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.929471] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.929704] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.929890] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleting the datastore file [datastore1] 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.933940] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.934192] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.934385] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.934587] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.934737] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.934887] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.935106] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.935268] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 
tempest-ImagesTestJSON-633451892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.935436] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.935595] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.935765] env[68674]: DEBUG nova.virt.hardware [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.936066] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fffe81fa-d705-4840-accd-899d3e93d6e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.938025] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.938619] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1da50f-3561-473d-ac5f-8fa3793ca7d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.940961] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b423671-afe3-4f75-a578-c196aaa0ca55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.947999] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5564e68b-8d62-419d-b58f-c263c1d6bd25 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.951738] env[68674]: DEBUG oslo_vmware.api [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 921.951738] env[68674]: value = "task-3240336" [ 921.951738] env[68674]: _type = "Task" [ 921.951738] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.971976] env[68674]: DEBUG oslo_vmware.api [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.971976] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.971976] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.972144] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Deleting the datastore file [datastore2] 245089a5-929d-49b0-aa36-749d342e8473 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.972309] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3337bfa-9155-441c-a265-9bc819eb3ffb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.977960] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 921.977960] env[68674]: value = "task-3240338" [ 921.977960] env[68674]: _type = "Task" [ 921.977960] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.988152] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240338, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.009309] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4ab20d4c-6d24-4033-8d9f-a0489d0c1343 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "f6d28c5e-fe32-4c53-98ac-747a1b79e6c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.050s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.327326] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519e715b-1d9b-4842-913a-9951b4caf1d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.335765] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53316dd-9aaa-476b-a82d-710e0524eab6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.366016] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3691cc67-2b6f-4027-b800-6ce96c2df553 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.373389] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fed7386-a57c-4ba1-9c4d-248fa0f90387 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.386784] env[68674]: DEBUG nova.compute.provider_tree [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.461983] env[68674]: DEBUG oslo_vmware.api [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168401} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.462261] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.462442] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.462616] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.462857] env[68674]: INFO nova.compute.manager [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 1.12 seconds to destroy the instance on the hypervisor. [ 922.463117] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.463303] env[68674]: DEBUG nova.compute.manager [-] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.463398] env[68674]: DEBUG nova.network.neutron [-] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.490919] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095301} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.491105] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.491269] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.491457] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.758854] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "6af32e52-f10e-47be-ab36-e130614ba9e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.759252] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.804628] env[68674]: DEBUG nova.compute.manager [req-e501f291-c43e-4393-a5fe-e040f32649cb req-ee5ab3b6-8c84-4737-9546-0667672a0e84 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Received event network-vif-plugged-f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.804628] env[68674]: DEBUG oslo_concurrency.lockutils [req-e501f291-c43e-4393-a5fe-e040f32649cb req-ee5ab3b6-8c84-4737-9546-0667672a0e84 service nova] Acquiring lock "e684ec31-b5d9-458c-bbba-36ada7f275bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.804628] env[68674]: DEBUG oslo_concurrency.lockutils [req-e501f291-c43e-4393-a5fe-e040f32649cb req-ee5ab3b6-8c84-4737-9546-0667672a0e84 service nova] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.804628] env[68674]: DEBUG oslo_concurrency.lockutils [req-e501f291-c43e-4393-a5fe-e040f32649cb req-ee5ab3b6-8c84-4737-9546-0667672a0e84 service nova] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.804628] env[68674]: DEBUG nova.compute.manager [req-e501f291-c43e-4393-a5fe-e040f32649cb req-ee5ab3b6-8c84-4737-9546-0667672a0e84 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] No waiting events found dispatching network-vif-plugged-f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 922.804628] env[68674]: WARNING nova.compute.manager [req-e501f291-c43e-4393-a5fe-e040f32649cb req-ee5ab3b6-8c84-4737-9546-0667672a0e84 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Received unexpected event network-vif-plugged-f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 for instance with vm_state building and task_state spawning. [ 922.890551] env[68674]: DEBUG nova.scheduler.client.report [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.942411] env[68674]: DEBUG nova.network.neutron [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Successfully updated port: f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 923.212444] env[68674]: DEBUG nova.objects.instance [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lazy-loading 'flavor' on Instance uuid 5e3f667c-5d3a-4465-9186-779563087480 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.261799] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 923.396416] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.923s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.399850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 19.172s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.426365] env[68674]: INFO nova.scheduler.client.report [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted allocations for instance f69c5fcf-6d25-48a5-a154-c3632c76175a [ 923.445440] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "refresh_cache-e684ec31-b5d9-458c-bbba-36ada7f275bd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.445651] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "refresh_cache-e684ec31-b5d9-458c-bbba-36ada7f275bd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.445744] env[68674]: DEBUG nova.network.neutron [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.543072] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.543241] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Flavor 
limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.543412] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.543577] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.543746] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.543908] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.544153] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.544290] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.544457] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.544618] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.544796] env[68674]: DEBUG nova.virt.hardware [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.545696] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ece6011-4e56-48a7-b596-214651e0587a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.554099] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-acb17aa0-6598-4708-9ee9-9c74d29b8c9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.569358] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.575167] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.575453] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.575672] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12306f9a-8e46-4cea-9eab-e75fb27f3be0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.593557] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.593557] env[68674]: value = "task-3240339" [ 923.593557] env[68674]: _type = "Task" [ 923.593557] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.601934] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240339, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.688126] env[68674]: DEBUG nova.network.neutron [-] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.715643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.715888] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.796329] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.909495] env[68674]: DEBUG nova.objects.instance [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lazy-loading 'migration_context' on Instance uuid 7aa58e2f-1202-4252-9c38-ce53084c573f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.933267] env[68674]: DEBUG oslo_concurrency.lockutils [None req-65038564-642d-4b80-915c-cd495d336380 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "f69c5fcf-6d25-48a5-a154-c3632c76175a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.016s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.000010] env[68674]: DEBUG nova.network.neutron [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.103738] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240339, 'name': CreateVM_Task, 'duration_secs': 0.397728} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.103910] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.104439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.104607] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.104934] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 924.105202] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee0ba9a4-2b2f-4c25-9f6c-3534c938ade8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.111354] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 924.111354] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52735a47-aed9-b828-e93a-5a347e7b6326" [ 924.111354] env[68674]: _type = "Task" [ 924.111354] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.118917] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52735a47-aed9-b828-e93a-5a347e7b6326, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.191079] env[68674]: INFO nova.compute.manager [-] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Took 1.73 seconds to deallocate network for instance. 
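(Editor's note) The recurring "Waiting for the task: (returnval){ ... }", "progress is N%" and "completed successfully" entries around this point are produced by oslo_vmware's task polling (wait_for_task delegating to _poll_task, the api.py:397/434/444 locations cited above). The sketch below is only an illustrative approximation of that loop for readers of this log, not oslo_vmware's actual implementation; the TaskFailed class and the get_task_info callable are hypothetical stand-ins for a vSphere TaskInfo lookup.

    import time

    class TaskFailed(Exception):
        """Hypothetical error type for this sketch."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state.

        get_task_info is assumed to return an object roughly mirroring the
        vSphere TaskInfo managed object, with .key, .name, .state ('queued',
        'running', 'success', 'error'), .progress and .error attributes.
        """
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                # Corresponds to the "Task: {'id': ...} progress is N%." entries.
                print(f"Task {info.key} ({info.name}) progress is {info.progress or 0}%")
            elif info.state == 'success':
                # Corresponds to the "... completed successfully." entries.
                print(f"Task {info.key} ({info.name}) completed successfully")
                return info
            else:  # 'error'
                raise TaskFailed(info.error)
            time.sleep(poll_interval)
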
[ 924.220857] env[68674]: DEBUG nova.network.neutron [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Updating instance_info_cache with network_info: [{"id": "f3cfd0df-ecd9-4b39-a765-31cf7a9bf316", "address": "fa:16:3e:84:33:d0", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cfd0df-ec", "ovs_interfaceid": "f3cfd0df-ecd9-4b39-a765-31cf7a9bf316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.294136] env[68674]: DEBUG nova.network.neutron [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.623756] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52735a47-aed9-b828-e93a-5a347e7b6326, 'name': SearchDatastore_Task, 'duration_secs': 0.010406} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.624026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.624260] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.624489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.624703] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.624911] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.625190] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5015ce50-040e-4e3b-bdd5-1189ef7ac00a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.634567] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.634747] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.635480] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0cb188-fd8c-4057-b128-29d50f5f03ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.640714] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 924.640714] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526e3ab0-007e-7ace-a31d-9394fb0f7698" [ 924.640714] env[68674]: _type = "Task" [ 924.640714] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.651092] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526e3ab0-007e-7ace-a31d-9394fb0f7698, 'name': SearchDatastore_Task, 'duration_secs': 0.00816} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.651788] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3734646c-4330-42d0-8641-be8f507e2076 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.658874] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 924.658874] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d9fd0a-ecb7-7192-4c29-6a0f4c84d8ef" [ 924.658874] env[68674]: _type = "Task" [ 924.658874] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.666309] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d9fd0a-ecb7-7192-4c29-6a0f4c84d8ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.697200] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.723836] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "refresh_cache-e684ec31-b5d9-458c-bbba-36ada7f275bd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.724153] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Instance network_info: |[{"id": "f3cfd0df-ecd9-4b39-a765-31cf7a9bf316", "address": "fa:16:3e:84:33:d0", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cfd0df-ec", "ovs_interfaceid": "f3cfd0df-ecd9-4b39-a765-31cf7a9bf316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 924.724592] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:33:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3cfd0df-ecd9-4b39-a765-31cf7a9bf316', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.732342] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.733356] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.734135] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ec9bcd-34d9-45d9-ba55-0a8b03e90c46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.736633] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94dc7012-b97b-48c0-825e-c99c048c5436 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.755752] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a65cb5-6762-4a4b-a340-036f7e739062 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.759671] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.759671] env[68674]: value = "task-3240340" [ 924.759671] env[68674]: _type = "Task" [ 924.759671] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.789410] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5cf366-9bc2-4562-94b0-a9e3e8b921e6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.794996] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240340, 'name': CreateVM_Task} progress is 15%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.802771] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00a3cac-732f-45c8-a604-828aa1d8412b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.817158] env[68674]: DEBUG nova.compute.provider_tree [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.852366] env[68674]: DEBUG nova.compute.manager [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Received event network-changed-f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.852586] env[68674]: DEBUG nova.compute.manager [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Refreshing instance network info cache due to event network-changed-f3cfd0df-ecd9-4b39-a765-31cf7a9bf316. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 924.852825] env[68674]: DEBUG oslo_concurrency.lockutils [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] Acquiring lock "refresh_cache-e684ec31-b5d9-458c-bbba-36ada7f275bd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.852991] env[68674]: DEBUG oslo_concurrency.lockutils [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] Acquired lock "refresh_cache-e684ec31-b5d9-458c-bbba-36ada7f275bd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.853422] env[68674]: DEBUG nova.network.neutron [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Refreshing network info cache for port f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 925.169447] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d9fd0a-ecb7-7192-4c29-6a0f4c84d8ef, 'name': SearchDatastore_Task, 'duration_secs': 0.008477} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.169726] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.169989] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 925.170259] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dbfb229-74a5-482a-b687-b134d5d87f0f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.179238] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 925.179238] env[68674]: value = "task-3240341" [ 925.179238] env[68674]: _type = "Task" [ 925.179238] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.186933] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240341, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.269624] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240340, 'name': CreateVM_Task, 'duration_secs': 0.394788} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.269796] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.270503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.270692] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.271037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.271302] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61c34e14-0b3c-4ffa-8b26-95445aff07ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.277680] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 925.277680] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52575ee7-35b2-9d3b-92f4-69ad230ef2c3" [ 925.277680] env[68674]: _type = "Task" [ 925.277680] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.285209] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52575ee7-35b2-9d3b-92f4-69ad230ef2c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.320012] env[68674]: DEBUG nova.network.neutron [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.321847] env[68674]: DEBUG nova.scheduler.client.report [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.600690] env[68674]: DEBUG nova.network.neutron [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Updated VIF entry in instance network info cache for port f3cfd0df-ecd9-4b39-a765-31cf7a9bf316. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.600690] env[68674]: DEBUG nova.network.neutron [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Updating instance_info_cache with network_info: [{"id": "f3cfd0df-ecd9-4b39-a765-31cf7a9bf316", "address": "fa:16:3e:84:33:d0", "network": {"id": "896418b0-8817-49dc-a965-e44ed5221810", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1185393062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dceab4b22c34737bc85ee5a5ded00d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cfd0df-ec", "ovs_interfaceid": "f3cfd0df-ecd9-4b39-a765-31cf7a9bf316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.694420] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240341, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.731399] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.731648] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.787787] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52575ee7-35b2-9d3b-92f4-69ad230ef2c3, 'name': SearchDatastore_Task, 'duration_secs': 0.01079} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.788090] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.788327] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.788561] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.788710] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.788888] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.789155] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6d0493a-664f-4d6f-873b-37b0fef858bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.808966] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.809172] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.809887] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d35097d-1343-4ffb-99a2-766b3d694db9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.815381] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 925.815381] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520c9e57-2c53-5283-a8bc-95ce0ec15269" [ 925.815381] env[68674]: _type = "Task" [ 925.815381] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.822471] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520c9e57-2c53-5283-a8bc-95ce0ec15269, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.826050] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.826268] env[68674]: DEBUG nova.compute.manager [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Inject network info {{(pid=68674) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 925.826528] env[68674]: DEBUG nova.compute.manager [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] network_info to inject: |[{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 925.831348] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Reconfiguring VM instance to set the machine id {{(pid=68674) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 925.835052] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b629338f-0664-4861-b9a3-98a9abbbe759 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.851674] env[68674]: DEBUG oslo_vmware.api [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 925.851674] env[68674]: value = "task-3240342" [ 925.851674] env[68674]: _type = "Task" [ 925.851674] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.859631] env[68674]: DEBUG oslo_vmware.api [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240342, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.102996] env[68674]: DEBUG oslo_concurrency.lockutils [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] Releasing lock "refresh_cache-e684ec31-b5d9-458c-bbba-36ada7f275bd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.103268] env[68674]: DEBUG nova.compute.manager [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Received event network-vif-deleted-1b7ed5a9-214f-4011-b73e-63954c02e25e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.103463] env[68674]: DEBUG nova.compute.manager [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.103627] env[68674]: DEBUG nova.compute.manager [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing instance network info cache due to event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 926.103862] env[68674]: DEBUG oslo_concurrency.lockutils [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.104014] env[68674]: DEBUG oslo_concurrency.lockutils [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.104185] env[68674]: DEBUG nova.network.neutron [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.190918] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240341, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62131} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.191227] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 926.191300] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.192815] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c484f06-4e79-422f-a4eb-e4d35ee36bcf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.198484] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 926.198484] env[68674]: value = "task-3240343" [ 926.198484] env[68674]: _type = "Task" [ 926.198484] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.205630] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240343, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.234791] env[68674]: DEBUG nova.compute.utils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 926.325533] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520c9e57-2c53-5283-a8bc-95ce0ec15269, 'name': SearchDatastore_Task, 'duration_secs': 0.008157} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.326359] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bceaf412-b8e0-4e92-a9aa-dc9123aef8ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.331009] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 926.331009] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524348d0-a72d-80b3-fd05-334ffaccd3dc" [ 926.331009] env[68674]: _type = "Task" [ 926.331009] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.338108] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524348d0-a72d-80b3-fd05-334ffaccd3dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.348028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.948s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.353190] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.653s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.354352] env[68674]: INFO nova.compute.claims [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.370491] env[68674]: DEBUG oslo_vmware.api [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240342, 'name': ReconfigVM_Task, 'duration_secs': 0.156934} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.370730] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc69f2d-4de5-4674-9b9e-7866f58f0103 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Reconfigured VM instance to set the machine id {{(pid=68674) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 926.707622] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069963} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.707880] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 926.708697] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df6cd36-bee6-492e-bc52-9679846245ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.734432] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.737761] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43947aca-2d3f-4109-b13c-89bf9c29e689 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.758697] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.027s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.764482] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 926.764482] env[68674]: value = "task-3240344" [ 926.764482] env[68674]: _type = "Task" [ 926.764482] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.772672] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240344, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.790720] env[68674]: DEBUG nova.objects.instance [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lazy-loading 'flavor' on Instance uuid 5e3f667c-5d3a-4465-9186-779563087480 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.841240] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524348d0-a72d-80b3-fd05-334ffaccd3dc, 'name': SearchDatastore_Task, 'duration_secs': 0.039867} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.841547] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.841838] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e684ec31-b5d9-458c-bbba-36ada7f275bd/e684ec31-b5d9-458c-bbba-36ada7f275bd.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.842107] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f24f4aff-5b4b-46b7-9ffc-3f2b7ea21027 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.848240] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 926.848240] env[68674]: value = "task-3240345" [ 926.848240] env[68674]: _type = "Task" [ 926.848240] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.855885] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.932305] env[68674]: DEBUG nova.network.neutron [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updated VIF entry in instance network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 926.932754] env[68674]: DEBUG nova.network.neutron [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.274407] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.296327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.357951] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240345, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454649} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.358239] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e684ec31-b5d9-458c-bbba-36ada7f275bd/e684ec31-b5d9-458c-bbba-36ada7f275bd.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.358455] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.358699] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7d285f4-e310-4e68-8ba5-aeb46dd224a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.366813] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 927.366813] env[68674]: value = "task-3240346" [ 927.366813] env[68674]: _type = "Task" [ 927.366813] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.379779] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240346, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.437366] env[68674]: DEBUG oslo_concurrency.lockutils [req-1c87bb9d-b5fd-4880-879f-a900c9ed8741 req-1c858f5d-bc34-4d35-b99d-d28aad0578ed service nova] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.438279] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.691706] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b141ae-fede-4a98-baac-2c9df29280ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.700187] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c829f629-64fe-40a1-aa02-582b6ec9dca6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.729328] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d00893-6e06-478e-a3ee-dbbc4550c8bb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.736589] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e081e2-bc38-418f-a9bd-2760c9a9d529 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.752018] env[68674]: DEBUG nova.compute.provider_tree [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.773527] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240344, 'name': ReconfigVM_Task, 'duration_secs': 0.573649} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.773809] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 245089a5-929d-49b0-aa36-749d342e8473/245089a5-929d-49b0-aa36-749d342e8473.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.774900] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dce224b-2dc3-48b5-83ac-0a679b50b53f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.777461] env[68674]: DEBUG nova.network.neutron [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.781675] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 927.781675] env[68674]: value = "task-3240347" [ 927.781675] env[68674]: _type = "Task" [ 927.781675] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.791075] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240347, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.799373] env[68674]: DEBUG nova.compute.manager [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.799632] env[68674]: DEBUG nova.compute.manager [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing instance network info cache due to event network-changed-3bad0946-10af-40d0-a8c2-a5469f09cf39. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 927.799754] env[68674]: DEBUG oslo_concurrency.lockutils [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] Acquiring lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.813623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.813847] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.814107] env[68674]: INFO nova.compute.manager [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Attaching volume f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32 to /dev/sdb [ 927.851773] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdc9e31-4814-491f-abd8-d904faa9b2d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.858400] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151a24c0-1945-4129-8e7f-571ab55635dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.873930] env[68674]: DEBUG nova.virt.block_device [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating existing volume attachment record: 12d3869a-cd27-45c2-ba39-a9d271deb901 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 927.880797] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065958} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.881080] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.881959] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf298016-96a3-4123-a9e6-2b02308e5808 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.907803] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] e684ec31-b5d9-458c-bbba-36ada7f275bd/e684ec31-b5d9-458c-bbba-36ada7f275bd.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.908955] env[68674]: INFO nova.compute.manager [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Swapping old allocation on dict_keys(['ade3f042-7427-494b-9654-0b65e074850c']) held by migration 8d2ad03f-6e50-4194-b3d7-a98f0bd46666 for instance [ 927.909207] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeabec7b-b524-46b5-a850-4a1da2dbb732 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.931522] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 927.931522] env[68674]: value = "task-3240348" [ 927.931522] env[68674]: _type = "Task" [ 927.931522] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.941777] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240348, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.953904] env[68674]: DEBUG nova.scheduler.client.report [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Overwriting current allocation {'allocations': {'ade3f042-7427-494b-9654-0b65e074850c': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 121}}, 'project_id': '22d2d9d6bfcd4f219b02b1356b36123e', 'user_id': 'a872f1b7c99f48bd821993386afdf84f', 'consumer_generation': 1} on consumer 7aa58e2f-1202-4252-9c38-ce53084c573f {{(pid=68674) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 928.036472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.036472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquired lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.036472] env[68674]: DEBUG nova.network.neutron [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.253769] env[68674]: DEBUG nova.scheduler.client.report [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.292030] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240347, 'name': Rename_Task, 'duration_secs': 0.140556} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.292030] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 928.292030] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-266d8f52-4754-4cef-a7da-30c73689718b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.297615] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 928.297615] env[68674]: value = "task-3240351" [ 928.297615] env[68674]: _type = "Task" [ 928.297615] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.304939] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240351, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.441048] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240348, 'name': ReconfigVM_Task, 'duration_secs': 0.307062} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.443873] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Reconfigured VM instance instance-0000004f to attach disk [datastore2] e684ec31-b5d9-458c-bbba-36ada7f275bd/e684ec31-b5d9-458c-bbba-36ada7f275bd.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.444469] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39d4b5c8-75b2-4c88-8f20-cb7b2e430562 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.451894] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 928.451894] env[68674]: value = "task-3240353" [ 928.451894] env[68674]: _type = "Task" [ 928.451894] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.461012] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240353, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.535165] env[68674]: DEBUG nova.network.neutron [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.759143] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.759831] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 928.763544] env[68674]: DEBUG nova.network.neutron [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [{"id": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "address": "fa:16:3e:b3:08:90", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd24d2f1b-cc", "ovs_interfaceid": "d24d2f1b-cc82-45a9-8d5c-94505a4de39f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.764727] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.433s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.766949] env[68674]: INFO nova.compute.claims [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.810393] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240351, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.962372] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240353, 'name': Rename_Task, 'duration_secs': 0.170229} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.962783] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 928.963137] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f83871ce-15df-43f4-9246-fdba3e91da34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.970576] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 928.970576] env[68674]: value = "task-3240354" [ 928.970576] env[68674]: _type = "Task" [ 928.970576] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.979816] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.037702] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.037973] env[68674]: DEBUG nova.compute.manager [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Inject network info {{(pid=68674) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 929.038266] env[68674]: DEBUG nova.compute.manager [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] network_info to inject: |[{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 929.043511] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Reconfiguring VM instance to set the machine id {{(pid=68674) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 929.043862] env[68674]: DEBUG oslo_concurrency.lockutils [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] Acquired lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.044088] env[68674]: DEBUG nova.network.neutron [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Refreshing network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.045441] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-755e1b28-bab8-4838-b8dd-90c500852a20 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.065091] env[68674]: DEBUG oslo_vmware.api [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 929.065091] env[68674]: value = "task-3240355" [ 929.065091] env[68674]: _type = "Task" [ 929.065091] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.075340] env[68674]: DEBUG oslo_vmware.api [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240355, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.272223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Releasing lock "refresh_cache-7aa58e2f-1202-4252-9c38-ce53084c573f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.272661] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.273870] env[68674]: DEBUG nova.compute.utils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 929.275215] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54ccb44a-ac4d-4891-b74d-754338246a56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.277458] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 929.277610] env[68674]: DEBUG nova.network.neutron [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 929.286089] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 929.286089] env[68674]: value = "task-3240356" [ 929.286089] env[68674]: _type = "Task" [ 929.286089] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.296266] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.307989] env[68674]: DEBUG oslo_vmware.api [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240351, 'name': PowerOnVM_Task, 'duration_secs': 0.674504} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.308585] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.308585] env[68674]: DEBUG nova.compute.manager [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.309483] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf81499-dc34-4fc2-84da-a496fdd6aaec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.321305] env[68674]: DEBUG nova.policy [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50e46e8c9fbc4778b5f89359ae81bfa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6b179855b874365964446f95f9f5a53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 929.485182] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240354, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.577300] env[68674]: DEBUG oslo_vmware.api [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240355, 'name': ReconfigVM_Task, 'duration_secs': 0.141727} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.577300] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed512fa-73f4-43ec-af31-6d921b025ef6 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Reconfigured VM instance to set the machine id {{(pid=68674) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 929.749370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "5e3f667c-5d3a-4465-9186-779563087480" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.749889] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "5e3f667c-5d3a-4465-9186-779563087480" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.749889] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "5e3f667c-5d3a-4465-9186-779563087480-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.750093] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "5e3f667c-5d3a-4465-9186-779563087480-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.750294] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "5e3f667c-5d3a-4465-9186-779563087480-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.754798] env[68674]: INFO nova.compute.manager [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Terminating instance [ 929.782018] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 929.795859] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240356, 'name': PowerOffVM_Task, 'duration_secs': 0.204466} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.796128] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.796773] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:10:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0763b137-0ce8-4652-8505-6b8377dc2900',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-684543547',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 929.796962] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.797128] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 929.797309] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.797566] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 929.797632] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 929.797797] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 929.797953] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 929.798131] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 929.798290] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 929.798456] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 929.804459] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5c0fac2-3f9c-4fff-a072-09de5eb745e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.828742] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 929.828742] env[68674]: value = "task-3240357" [ 929.828742] env[68674]: _type = "Task" [ 929.828742] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.835430] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.842172] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240357, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.858211] env[68674]: DEBUG nova.network.neutron [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Successfully created port: 88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.969019] env[68674]: DEBUG nova.network.neutron [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updated VIF entry in instance network info cache for port 3bad0946-10af-40d0-a8c2-a5469f09cf39. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.969019] env[68674]: DEBUG nova.network.neutron [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [{"id": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "address": "fa:16:3e:92:b8:08", "network": {"id": "6e938754-bc83-4806-86a2-808cd64ac44a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-632467165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30b0da251a0d4f9c96f907b31ef9d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bad0946-10", "ovs_interfaceid": "3bad0946-10af-40d0-a8c2-a5469f09cf39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.983948] env[68674]: DEBUG oslo_vmware.api [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240354, 'name': PowerOnVM_Task, 'duration_secs': 0.61323} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.987447] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.987447] env[68674]: INFO nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Took 8.08 seconds to spawn the instance on the hypervisor. 
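[editorial note, not part of the captured log] The PowerOnVM_Task / PowerOffVM_Task / ReconfigVM_Task entries above come from the driver starting an asynchronous vSphere task and then polling it through oslo.vmware, which is what produces the "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming only the public oslo.vmware API; the vCenter endpoint, credentials and VM moref below are placeholders, not values taken from this log:

```python
# Illustrative sketch (not part of the captured log): driving an asynchronous
# vSphere task the way the entries above report it.  Requires a reachable
# vCenter; the host, credentials and moref value are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',         # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)  # poll cadence behind "progress is N%"

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder VM reference

# Start the asynchronous SOAP operation, then block until the task finishes;
# wait_for_task polls the task object (logging progress) and raises if the
# task ends in an error state, otherwise a "completed successfully" line is
# emitted like the ones above.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```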
[ 929.987447] env[68674]: DEBUG nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.988306] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d655682c-8a7d-47b4-9203-40732cd2d9ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.159280] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8f463f-c4ed-462d-8d15-5b067115cf26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.168024] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04915285-a45a-4ea5-8ec3-9708d8e0650e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.200068] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5b9d9a-60d2-411f-8096-4d77fbfcb9ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.207376] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c0cc3f-356a-465d-bd14-dd313b94d711 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.221387] env[68674]: DEBUG nova.compute.provider_tree [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.258824] env[68674]: DEBUG nova.compute.manager [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.259075] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.259898] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c1f7a1-3821-4481-a610-9e6c47236f2c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.267966] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.268228] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55c06bf8-579c-42f4-b92f-2e03b2894dae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.274703] env[68674]: DEBUG oslo_vmware.api [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 930.274703] env[68674]: value = "task-3240358" [ 930.274703] env[68674]: _type = "Task" [ 930.274703] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.286839] env[68674]: DEBUG oslo_vmware.api [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.339664] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240357, 'name': ReconfigVM_Task, 'duration_secs': 0.157601} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.340502] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b124d9c-fb36-44d4-821c-e0f767950a9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.358928] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:10:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0763b137-0ce8-4652-8505-6b8377dc2900',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-684543547',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.359186] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.359342] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.359689] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.359689] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.359799] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.360017] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.360198] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.360430] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.360526] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.360734] env[68674]: DEBUG nova.virt.hardware [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.361535] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9019cde-277c-403c-b66b-da1c72db1034 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.366983] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 930.366983] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5254ed64-90f4-e303-5a67-e7151e446093" [ 930.366983] env[68674]: _type = "Task" [ 930.366983] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.374693] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5254ed64-90f4-e303-5a67-e7151e446093, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.469931] env[68674]: DEBUG oslo_concurrency.lockutils [req-4b627bfa-429e-4855-922d-6b1f4fadac01 req-6df0b031-968c-4552-a4b3-62d850dc7867 service nova] Releasing lock "refresh_cache-5e3f667c-5d3a-4465-9186-779563087480" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.507708] env[68674]: INFO nova.compute.manager [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Took 29.30 seconds to build instance. 
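[editorial note, not part of the captured log] The "Acquiring lock ... by", "acquired ... waited" and "released ... held" entries in this section are emitted by oslo.concurrency's synchronized wrapper around per-instance critical sections (terminate, resource claims, cache refresh). A minimal sketch of that pattern, assuming the standard lockutils.synchronized decorator; the instance UUID is copied from the log, the function name is a placeholder:

```python
# Illustrative sketch (not part of the captured log): the locking pattern
# behind the lockutils "Acquiring lock ... / acquired ... waited / released
# ... held" entries.  The UUID comes from the log; the function is a placeholder.
from oslo_concurrency import lockutils

INSTANCE_UUID = '245089a5-929d-49b0-aa36-749d342e8473'

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Only one thread in this process enters the body for a given lock name;
    # the decorator's wrapper logs how long the caller waited to acquire the
    # lock and how long it was held, which is what the entries above show.
    pass

do_terminate_instance()
```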
[ 930.521380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "245089a5-929d-49b0-aa36-749d342e8473" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.521682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "245089a5-929d-49b0-aa36-749d342e8473" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.522038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "245089a5-929d-49b0-aa36-749d342e8473-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.522113] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "245089a5-929d-49b0-aa36-749d342e8473-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.522292] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "245089a5-929d-49b0-aa36-749d342e8473-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.524646] env[68674]: INFO nova.compute.manager [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Terminating instance [ 930.724180] env[68674]: DEBUG nova.scheduler.client.report [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.785404] env[68674]: DEBUG oslo_vmware.api [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': 
task-3240358, 'name': PowerOffVM_Task, 'duration_secs': 0.303302} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.785699] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.785870] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 930.786172] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2afe4e82-48d0-42d6-b5d9-850744c542c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.788528] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 930.813888] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.814049] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.814215] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.814398] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 930.814546] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.814708] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.814945] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.815145] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.815338] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.815505] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.815674] env[68674]: DEBUG nova.virt.hardware [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.816847] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb7b090-e5e9-43cc-be1e-8c5641f2fad0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.825009] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964113d4-6aa9-4654-9df4-751046cc105d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.876337] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5254ed64-90f4-e303-5a67-e7151e446093, 'name': SearchDatastore_Task, 'duration_secs': 0.008694} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.881853] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfiguring VM instance instance-0000003f to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 930.883587] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a06b6b10-8098-4901-846b-79d6b84156e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.896734] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 930.897079] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 930.897193] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Deleting the datastore file [datastore1] 5e3f667c-5d3a-4465-9186-779563087480 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.897738] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-097c1054-7149-493b-9230-c387e82b0e91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.904457] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 930.904457] env[68674]: value = "task-3240362" [ 930.904457] env[68674]: _type = "Task" [ 930.904457] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.905777] env[68674]: DEBUG oslo_vmware.api [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for the task: (returnval){ [ 930.905777] env[68674]: value = "task-3240361" [ 930.905777] env[68674]: _type = "Task" [ 930.905777] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.916325] env[68674]: DEBUG oslo_vmware.api [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240361, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.919516] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.011406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f8fecb07-4115-4c8f-be26-65f0371f0f94 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.825s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.028234] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "refresh_cache-245089a5-929d-49b0-aa36-749d342e8473" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.028486] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquired lock "refresh_cache-245089a5-929d-49b0-aa36-749d342e8473" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.028758] env[68674]: DEBUG nova.network.neutron [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.229108] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.464s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.229667] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 931.232429] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.689s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.233028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.235302] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.568s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.236769] env[68674]: INFO nova.compute.claims [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.259942] env[68674]: INFO nova.scheduler.client.report [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocations for instance e1283f87-5bdb-4d4e-a1c5-f3b1c9180188 [ 931.352884] env[68674]: DEBUG nova.compute.manager [req-d69c21f1-5081-4a0e-9920-31c6c9d02e21 req-3751b58f-270b-43e6-978c-f799dca02164 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Received event network-vif-plugged-88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 931.353189] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69c21f1-5081-4a0e-9920-31c6c9d02e21 req-3751b58f-270b-43e6-978c-f799dca02164 service nova] Acquiring lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.353441] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69c21f1-5081-4a0e-9920-31c6c9d02e21 req-3751b58f-270b-43e6-978c-f799dca02164 service nova] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.353511] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69c21f1-5081-4a0e-9920-31c6c9d02e21 req-3751b58f-270b-43e6-978c-f799dca02164 service nova] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.353681] 
env[68674]: DEBUG nova.compute.manager [req-d69c21f1-5081-4a0e-9920-31c6c9d02e21 req-3751b58f-270b-43e6-978c-f799dca02164 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] No waiting events found dispatching network-vif-plugged-88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 931.353848] env[68674]: WARNING nova.compute.manager [req-d69c21f1-5081-4a0e-9920-31c6c9d02e21 req-3751b58f-270b-43e6-978c-f799dca02164 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Received unexpected event network-vif-plugged-88af4dfc-59d6-4564-9ca9-d5383ed87da6 for instance with vm_state building and task_state spawning. [ 931.421591] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240362, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.424702] env[68674]: DEBUG oslo_vmware.api [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Task: {'id': task-3240361, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163369} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.425270] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.425270] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.425403] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.425479] env[68674]: INFO nova.compute.manager [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Took 1.17 seconds to destroy the instance on the hypervisor. [ 931.425703] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.426041] env[68674]: DEBUG nova.compute.manager [-] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.426156] env[68674]: DEBUG nova.network.neutron [-] [instance: 5e3f667c-5d3a-4465-9186-779563087480] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.454343] env[68674]: DEBUG nova.network.neutron [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Successfully updated port: 88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.565881] env[68674]: DEBUG nova.network.neutron [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.675410] env[68674]: DEBUG nova.network.neutron [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.745129] env[68674]: DEBUG nova.compute.utils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 931.746175] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 931.746359] env[68674]: DEBUG nova.network.neutron [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.749045] env[68674]: DEBUG nova.compute.manager [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.749982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a42c1c-368b-43d8-be65-67dbd54d4267 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.767817] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2b4cfb5d-3905-4214-964f-f3dc61ebdda9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "e1283f87-5bdb-4d4e-a1c5-f3b1c9180188" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.051s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.808710] env[68674]: DEBUG nova.policy [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b3a4c2c5bae41998d58a116e648883d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa34d6d90c6d46aaa2cb77259b5e0c27', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 931.920083] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240362, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.958176] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.958292] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.958408] env[68674]: DEBUG nova.network.neutron [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.181229] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Releasing lock "refresh_cache-245089a5-929d-49b0-aa36-749d342e8473" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.181229] env[68674]: DEBUG nova.compute.manager [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.181229] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.181229] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94853eae-9f8e-4f48-b8a6-3996a333b9b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.190066] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.190321] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-169fd543-f0f8-4fdc-bd73-2252b79a728f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.196482] env[68674]: DEBUG oslo_vmware.api [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 932.196482] env[68674]: value = "task-3240363" [ 932.196482] env[68674]: _type = "Task" [ 932.196482] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.208451] env[68674]: DEBUG oslo_vmware.api [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.248824] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 932.266486] env[68674]: INFO nova.compute.manager [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] instance snapshotting [ 932.271041] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3dbe56-f07f-4b91-a7ce-68c97d0f5db1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.308986] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f81dda6-e0fb-46e2-9880-c63ded0935f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.381533] env[68674]: DEBUG nova.network.neutron [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Successfully created port: 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.423444] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240362, 'name': ReconfigVM_Task, 'duration_secs': 1.179682} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.423773] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfigured VM instance instance-0000003f to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 932.424651] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c5df43-ec0a-4723-bba3-3a5296683981 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.458052] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.461075] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8376bd0-e57f-44ec-b025-74517a8ad8bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.482472] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 932.482472] env[68674]: value = "task-3240364" [ 932.482472] env[68674]: _type = "Task" [ 932.482472] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.494933] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240364, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.522077] env[68674]: DEBUG nova.network.neutron [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.683444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40662897-8b88-42ba-a148-6ef166eeff6b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.686187] env[68674]: DEBUG nova.network.neutron [-] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.695970] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca4f386-685b-48ae-93c5-21fb2f95ee97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.711047] env[68674]: DEBUG oslo_vmware.api [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240363, 'name': PowerOffVM_Task, 'duration_secs': 0.134109} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.745827] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.747032] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.749410] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb6e78d3-ad1c-4758-babe-f97cc99f05e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.751982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aba3d3f-2ad3-47b1-adbc-4719ac91b10b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.766038] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bbb241-76f0-4b30-8885-f952d3c15586 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.782944] env[68674]: DEBUG nova.compute.provider_tree [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.787869] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.787869] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.787869] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Deleting the datastore file [datastore1] 245089a5-929d-49b0-aa36-749d342e8473 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.787869] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b84738e1-ec38-48f6-8baf-a28871889120 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.793142] env[68674]: DEBUG oslo_vmware.api [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 
tempest-ServersListShow298Test-2008292874-project-member] Waiting for the task: (returnval){ [ 932.793142] env[68674]: value = "task-3240366" [ 932.793142] env[68674]: _type = "Task" [ 932.793142] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.801658] env[68674]: DEBUG oslo_vmware.api [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.819710] env[68674]: DEBUG nova.network.neutron [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance_info_cache with network_info: [{"id": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "address": "fa:16:3e:92:28:03", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88af4dfc-59", "ovs_interfaceid": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.826363] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 932.826363] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-50eaf726-216c-4eac-8250-a7d629fd3945 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.831644] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 932.831644] env[68674]: value = "task-3240367" [ 932.831644] env[68674]: _type = "Task" [ 932.831644] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.844053] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240367, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.926301] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 932.926547] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647601', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'name': 'volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7d953e59-53c1-4041-a641-35c12c012f7e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'serial': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 932.928027] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05682d0-41bc-4d42-bfbe-91cb119ddc79 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.950208] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea19b632-8223-4181-90b7-df345af85292 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.978958] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32/volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.978958] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e107be26-2de5-4bb2-9cca-259e10a38635 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.000473] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240364, 'name': ReconfigVM_Task, 'duration_secs': 0.293278} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.001652] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f/7aa58e2f-1202-4252-9c38-ce53084c573f.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.002066] env[68674]: DEBUG oslo_vmware.api [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 933.002066] env[68674]: value = "task-3240368" [ 933.002066] env[68674]: _type = "Task" [ 933.002066] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.002744] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2934526-a703-49e5-8cb6-68c53a2df231 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.016534] env[68674]: DEBUG oslo_vmware.api [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240368, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.032811] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ac7adc-5a1f-4f6f-bdbd-3713fc0429b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.052427] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3763dc9a-402e-4f3c-b30e-fe0dbaa7a679 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.076019] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ed5de3-b9a1-4258-850c-699955fd963c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.080171] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.080455] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b05b929-5760-4f83-bd04-36ca67be9293 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.086871] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 933.086871] env[68674]: value = "task-3240369" [ 933.086871] env[68674]: _type = "Task" [ 933.086871] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.094901] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240369, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.189272] env[68674]: INFO nova.compute.manager [-] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Took 1.76 seconds to deallocate network for instance. [ 933.262900] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 933.287956] env[68674]: DEBUG nova.scheduler.client.report [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.293645] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 933.293944] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.294027] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 933.294212] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 
tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.295245] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 933.295245] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 933.295245] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 933.295245] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 933.295245] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 933.295245] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 933.295485] env[68674]: DEBUG nova.virt.hardware [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 933.296224] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baebc302-e708-49df-b8bb-b7f156dc05da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.310081] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1149e3d2-a480-42d1-bc97-51d9097fd604 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.314287] env[68674]: DEBUG oslo_vmware.api [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Task: {'id': task-3240366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099214} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.314525] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.314700] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.314871] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.315045] env[68674]: INFO nova.compute.manager [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Took 1.14 seconds to destroy the instance on the hypervisor. [ 933.315277] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.315817] env[68674]: DEBUG nova.compute.manager [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.315928] env[68674]: DEBUG nova.network.neutron [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.325921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.326242] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Instance network_info: |[{"id": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "address": "fa:16:3e:92:28:03", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88af4dfc-59", "ovs_interfaceid": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 933.327161] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:28:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88af4dfc-59d6-4564-9ca9-d5383ed87da6', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.336497] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.336497] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.338580] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42b7eb82-bf10-4062-94ea-4b452c2a048f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.354517] env[68674]: DEBUG nova.network.neutron [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.359044] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240367, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.360788] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.360788] env[68674]: value = "task-3240370" [ 933.360788] env[68674]: _type = "Task" [ 933.360788] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.371136] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240370, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.405644] env[68674]: DEBUG nova.compute.manager [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Received event network-changed-88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.405894] env[68674]: DEBUG nova.compute.manager [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Refreshing instance network info cache due to event network-changed-88af4dfc-59d6-4564-9ca9-d5383ed87da6. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 933.406114] env[68674]: DEBUG oslo_concurrency.lockutils [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] Acquiring lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.406267] env[68674]: DEBUG oslo_concurrency.lockutils [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] Acquired lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.406448] env[68674]: DEBUG nova.network.neutron [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Refreshing network info cache for port 88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 933.518833] env[68674]: DEBUG oslo_vmware.api [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240368, 'name': ReconfigVM_Task, 'duration_secs': 0.379384} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.519249] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfigured VM instance instance-00000048 to attach disk [datastore2] volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32/volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.524297] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d6dadc0-f3e1-49db-a8ae-78fe0b69a841 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.540085] env[68674]: DEBUG oslo_vmware.api [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 933.540085] env[68674]: value = "task-3240371" [ 933.540085] env[68674]: _type = "Task" [ 933.540085] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.550212] env[68674]: DEBUG oslo_vmware.api [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240371, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.596897] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240369, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.696199] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.800537] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.801200] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 933.803919] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.639s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.804169] env[68674]: DEBUG nova.objects.instance [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lazy-loading 'resources' on Instance uuid 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 933.844716] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240367, 'name': CreateSnapshot_Task, 'duration_secs': 0.690404} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.845015] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 933.845779] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ffbf7b-6dd0-4227-920b-eb896cb7c5a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.859846] env[68674]: DEBUG nova.network.neutron [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.869848] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240370, 'name': CreateVM_Task, 'duration_secs': 0.397603} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.870699] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.871490] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.871580] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.871888] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 933.872437] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48120fed-26d4-4975-b8c8-a6cc921db4d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.877191] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 933.877191] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52be540d-1d12-de80-0d66-62eb8977807a" [ 933.877191] env[68674]: _type = "Task" [ 933.877191] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.885860] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52be540d-1d12-de80-0d66-62eb8977807a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.051729] env[68674]: DEBUG oslo_vmware.api [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240371, 'name': ReconfigVM_Task, 'duration_secs': 0.225323} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.052059] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647601', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'name': 'volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7d953e59-53c1-4041-a641-35c12c012f7e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'serial': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 934.097827] env[68674]: DEBUG oslo_vmware.api [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240369, 'name': PowerOnVM_Task, 'duration_secs': 0.686617} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.101474] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.259989] env[68674]: DEBUG nova.network.neutron [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updated VIF entry in instance network info cache for port 88af4dfc-59d6-4564-9ca9-d5383ed87da6. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 934.260358] env[68674]: DEBUG nova.network.neutron [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance_info_cache with network_info: [{"id": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "address": "fa:16:3e:92:28:03", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88af4dfc-59", "ovs_interfaceid": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.293377] env[68674]: DEBUG nova.network.neutron [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Successfully updated port: 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.307372] env[68674]: DEBUG nova.compute.utils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 934.313592] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 934.313592] env[68674]: DEBUG nova.network.neutron [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 934.364281] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 934.366969] env[68674]: INFO nova.compute.manager [-] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Took 1.05 seconds to deallocate network for instance. [ 934.367233] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-da4d0be5-ba3e-4476-aa62-492db5e074e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.371812] env[68674]: DEBUG nova.policy [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d6c216061c342ef8010b34eb2fc552d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0e50ba46714425a9ae403a170fe8fd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 934.379324] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 934.379324] env[68674]: value = "task-3240372" [ 934.379324] env[68674]: _type = "Task" [ 934.379324] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.392282] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52be540d-1d12-de80-0d66-62eb8977807a, 'name': SearchDatastore_Task, 'duration_secs': 0.014667} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.396288] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.396479] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 934.397017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.397017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.397017] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.397233] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240372, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.399525] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-698a03a9-b97f-47df-97ce-fe0092afc2f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.412678] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.412999] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 934.415879] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdbe5e75-9eda-4abd-93ce-9227dfe99adb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.424290] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 934.424290] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e266dc-e904-b811-f6b7-86bea7b64865" [ 934.424290] env[68674]: _type = "Task" [ 934.424290] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.431236] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e266dc-e904-b811-f6b7-86bea7b64865, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.695995] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925ec5ea-1fe6-4a7c-a57b-5a6d1cd59a05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.704089] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46cb50c-1daa-4a1a-ae85-8b09d826eab7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.737550] env[68674]: DEBUG nova.network.neutron [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Successfully created port: 0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 934.740137] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac7b4e2-b1ad-44c3-8b09-a2bc57199293 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.748987] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03b01b5-d841-410f-868f-47ecab0e2bb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.763547] env[68674]: DEBUG nova.compute.provider_tree [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.765855] env[68674]: DEBUG oslo_concurrency.lockutils [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] Releasing lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
934.766107] env[68674]: DEBUG nova.compute.manager [req-a58406e1-3eeb-41fc-a5da-feedcb25971f req-a7f8e3e8-a21a-494b-82ad-fe4e14410a42 service nova] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Received event network-vif-deleted-3bad0946-10af-40d0-a8c2-a5469f09cf39 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.795340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.795450] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.795583] env[68674]: DEBUG nova.network.neutron [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.813458] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 934.878791] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.891442] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240372, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.934252] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e266dc-e904-b811-f6b7-86bea7b64865, 'name': SearchDatastore_Task, 'duration_secs': 0.019428} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.935482] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab42e3e-d64e-4cc7-b8d6-caed17b207be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.941885] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 934.941885] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5212baa0-cd26-7954-e62e-833365b5a18c" [ 934.941885] env[68674]: _type = "Task" [ 934.941885] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.950876] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5212baa0-cd26-7954-e62e-833365b5a18c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.115274] env[68674]: DEBUG nova.objects.instance [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'flavor' on Instance uuid 7d953e59-53c1-4041-a641-35c12c012f7e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.123385] env[68674]: INFO nova.compute.manager [None req-79bcb643-b4e8-47cb-8a39-a846639534bf tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance to original state: 'active' [ 935.266519] env[68674]: DEBUG nova.scheduler.client.report [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.352067] env[68674]: DEBUG nova.network.neutron [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 935.399008] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240372, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.452140] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5212baa0-cd26-7954-e62e-833365b5a18c, 'name': SearchDatastore_Task, 'duration_secs': 0.010378} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.454806] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.455103] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701/ffdd1c62-1b4e-40cf-a27e-ff2877439701.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 935.455344] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ade3964b-e74c-40b7-8454-44963e775bba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.463838] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 935.463838] env[68674]: value = "task-3240373" [ 935.463838] env[68674]: _type = "Task" [ 935.463838] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.472184] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240373, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.498902] env[68674]: DEBUG nova.compute.manager [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Received event network-vif-plugged-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.499027] env[68674]: DEBUG oslo_concurrency.lockutils [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.499278] env[68674]: DEBUG oslo_concurrency.lockutils [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] Lock "f70145c9-4846-42e1-9c1c-de9759097abd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.499463] env[68674]: DEBUG oslo_concurrency.lockutils [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] Lock "f70145c9-4846-42e1-9c1c-de9759097abd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.499703] env[68674]: DEBUG nova.compute.manager [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] No waiting events found dispatching network-vif-plugged-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 935.499880] env[68674]: WARNING nova.compute.manager [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Received unexpected event network-vif-plugged-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 for instance with vm_state building and task_state spawning. [ 935.500070] env[68674]: DEBUG nova.compute.manager [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Received event network-changed-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.500626] env[68674]: DEBUG nova.compute.manager [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Refreshing instance network info cache due to event network-changed-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 935.500626] env[68674]: DEBUG oslo_concurrency.lockutils [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] Acquiring lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.560609] env[68674]: DEBUG nova.network.neutron [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updating instance_info_cache with network_info: [{"id": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "address": "fa:16:3e:52:b1:2d", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea7b81b-2d", "ovs_interfaceid": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.623566] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfe4bf99-19ec-4a0e-ae1e-411e390d78b9 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.810s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.772536] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.775310] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.604s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.775582] env[68674]: DEBUG nova.objects.instance [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 
'resources' on Instance uuid 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.801739] env[68674]: INFO nova.scheduler.client.report [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Deleted allocations for instance 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3 [ 935.824614] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 935.857462] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 935.857840] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 935.858047] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 935.858244] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 935.858428] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 935.858678] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 935.858932] env[68674]: DEBUG nova.virt.hardware [None 
req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 935.859117] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 935.859299] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 935.859466] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 935.859651] env[68674]: DEBUG nova.virt.hardware [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 935.860887] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453621a2-dfb8-4d09-86fc-883160b9b761 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.871146] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5347cb9f-dea2-4d0f-8257-07c6c1f3c4d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.897179] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240372, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.973698] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240373, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.064611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.064994] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Instance network_info: |[{"id": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "address": "fa:16:3e:52:b1:2d", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea7b81b-2d", "ovs_interfaceid": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 936.065382] env[68674]: DEBUG oslo_concurrency.lockutils [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] Acquired lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.066037] env[68674]: DEBUG nova.network.neutron [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Refreshing network info cache for port 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.066922] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:b1:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b9aabc7c-0f6c-42eb-bd27-493a1496c0c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.075225] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 
tempest-AttachVolumeNegativeTest-4446692-project-member] Creating folder: Project (aa34d6d90c6d46aaa2cb77259b5e0c27). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 936.079142] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f60e9d4-0b8a-4d22-bca6-19fcdbc80c92 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.091508] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Created folder: Project (aa34d6d90c6d46aaa2cb77259b5e0c27) in parent group-v647377. [ 936.091508] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Creating folder: Instances. Parent ref: group-v647605. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 936.091508] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34ce9f91-2e1c-4140-a448-5fde41c4b8e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.103593] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Created folder: Instances in parent group-v647605. [ 936.103867] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.104149] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.104761] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbb10f5d-1eb0-467d-ad4a-85fd72b56927 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.126131] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.126131] env[68674]: value = "task-3240376" [ 936.126131] env[68674]: _type = "Task" [ 936.126131] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.135158] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240376, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.279050] env[68674]: DEBUG nova.objects.instance [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'numa_topology' on Instance uuid 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.309947] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f9512fee-5feb-49f4-8f10-d7c8d4133a82 tempest-DeleteServersAdminTestJSON-2002239449 tempest-DeleteServersAdminTestJSON-2002239449-project-admin] Lock "1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.769s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.325034] env[68674]: DEBUG nova.network.neutron [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updated VIF entry in instance network info cache for port 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.325452] env[68674]: DEBUG nova.network.neutron [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updating instance_info_cache with network_info: [{"id": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "address": "fa:16:3e:52:b1:2d", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea7b81b-2d", "ovs_interfaceid": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.397692] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240372, 'name': CloneVM_Task, 'duration_secs': 1.71846} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.397967] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Created linked-clone VM from snapshot [ 936.398725] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613f631a-31dc-408b-8fbd-6e84f9151699 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.408867] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Uploading image 8c4e52a0-bb40-401b-bb68-e93ee99189b2 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 936.424047] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 936.424047] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0bddf1f9-b443-47ad-9304-669768e19756 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.430931] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 936.430931] env[68674]: value = "task-3240377" [ 936.430931] env[68674]: _type = "Task" [ 936.430931] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.439454] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240377, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.452124] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "7aa58e2f-1202-4252-9c38-ce53084c573f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.452402] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.452623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.452792] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.452994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.456063] env[68674]: INFO nova.compute.manager [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Rescuing [ 936.457952] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.458185] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.458366] env[68674]: DEBUG nova.network.neutron [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 
tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.459746] env[68674]: INFO nova.compute.manager [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Terminating instance [ 936.475861] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515371} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.476050] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701/ffdd1c62-1b4e-40cf-a27e-ff2877439701.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.477809] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.477809] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a379746-2cd6-49f5-afc3-84f5ad8546e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.485034] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 936.485034] env[68674]: value = "task-3240378" [ 936.485034] env[68674]: _type = "Task" [ 936.485034] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.494335] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240378, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.636137] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240376, 'name': CreateVM_Task, 'duration_secs': 0.504439} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.636321] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 936.636991] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.637177] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.637493] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 936.638195] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea316669-4f33-4e40-b55d-87335f2607ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.642629] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 936.642629] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae26c6-f9d5-99b4-aefb-65a661335d62" [ 936.642629] env[68674]: _type = "Task" [ 936.642629] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.658244] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae26c6-f9d5-99b4-aefb-65a661335d62, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.705094] env[68674]: DEBUG nova.network.neutron [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Successfully updated port: 0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.781843] env[68674]: DEBUG nova.objects.base [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Object Instance<63d6c185-db2c-4ede-a716-9a0dd432ab1f> lazy-loaded attributes: resources,numa_topology {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 936.830345] env[68674]: DEBUG oslo_concurrency.lockutils [req-3acf0122-73c5-40e9-b433-a0155d5afe31 req-8c270ba3-e065-4540-b57d-e2f5803dc1cb service nova] Releasing lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.943496] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240377, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.971373] env[68674]: DEBUG nova.compute.manager [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.971606] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.972497] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad0d0d6-64a3-444d-9eac-3e006ef0ee5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.980160] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.982435] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49a065dc-3ac3-4e6f-91b7-63b6888e1ddc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.989762] env[68674]: DEBUG oslo_vmware.api [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 936.989762] env[68674]: value = "task-3240379" [ 936.989762] env[68674]: _type = "Task" [ 936.989762] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.997550] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240378, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072111} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.000626] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.001763] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d830674-1b49-417f-8175-74a16ce27d36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.007709] env[68674]: DEBUG oslo_vmware.api [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240379, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.027840] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701/ffdd1c62-1b4e-40cf-a27e-ff2877439701.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.030626] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f73882bc-79d4-4133-a982-7c7a1f0481b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.053105] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 937.053105] env[68674]: value = "task-3240380" [ 937.053105] env[68674]: _type = "Task" [ 937.053105] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.061550] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240380, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.136980] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ddf838-269d-46e3-8565-387ef5eb9967 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.150556] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2cd7d0-c103-4350-b91a-039d578424fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.160833] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ae26c6-f9d5-99b4-aefb-65a661335d62, 'name': SearchDatastore_Task, 'duration_secs': 0.020991} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.191118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.191118] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.191118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.191118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.191830] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.192380] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43f977c1-7ccd-44c7-9710-edd5c052b807 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.195081] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cdef6b10-2a83-4eab-8402-65ecd7e7d75c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.203167] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067ff2aa-d272-41dc-a095-49392798a047 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.209072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "refresh_cache-33313b29-abaf-4ff7-9182-abfcfb9b3220" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.209224] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquired lock "refresh_cache-33313b29-abaf-4ff7-9182-abfcfb9b3220" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.209382] env[68674]: DEBUG nova.network.neutron [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.210542] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.210741] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.212020] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-338607f0-f2de-4716-9353-db674e2d3059 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.222092] env[68674]: DEBUG nova.compute.provider_tree [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.227190] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 937.227190] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d280f6-2898-f46c-0f51-bf6f454d966e" [ 937.227190] env[68674]: _type = "Task" [ 937.227190] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.235999] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d280f6-2898-f46c-0f51-bf6f454d966e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.244867] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.245122] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.285054] env[68674]: DEBUG nova.network.neutron [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.443565] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240377, 'name': Destroy_Task, 'duration_secs': 0.89391} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.443916] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Destroyed the VM [ 937.444215] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 937.444496] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f8e2620c-542b-4cb9-af56-ccb65cf1e969 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.450557] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 937.450557] env[68674]: value = "task-3240381" [ 937.450557] env[68674]: _type = "Task" [ 937.450557] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.458442] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240381, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.499352] env[68674]: DEBUG oslo_vmware.api [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240379, 'name': PowerOffVM_Task, 'duration_secs': 0.183158} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.499584] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.499754] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.500017] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96ec2ba0-5a5d-45d3-a9d5-ed6cf5547bcb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.528054] env[68674]: DEBUG nova.compute.manager [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Received event network-vif-plugged-0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.528272] env[68674]: DEBUG oslo_concurrency.lockutils [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] Acquiring lock "33313b29-abaf-4ff7-9182-abfcfb9b3220-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.528482] env[68674]: DEBUG oslo_concurrency.lockutils [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.528658] env[68674]: DEBUG oslo_concurrency.lockutils [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.528822] env[68674]: DEBUG nova.compute.manager [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] No waiting events found dispatching network-vif-plugged-0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 937.528983] env[68674]: WARNING nova.compute.manager [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Received unexpected event network-vif-plugged-0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 for instance with vm_state building and task_state spawning. 
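The entries above trace the compute manager's external-event path: Neutron reports network-vif-plugged and network-changed for the instance's port, the handler takes the per-instance "-events" lock, pops any registered waiter for that event, logs a warning when nothing is waiting because the instance is still in vm_state building, and on network-changed refreshes the instance's network info cache. The Python sketch below is only a minimal, hypothetical illustration of that dispatch pattern; the class InstanceEvents, the helper handle_external_event, and the plain threading primitives are simplified stand-ins and not Nova's actual implementation.

```python
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEvents:
    """Simplified stand-in for the per-instance event registry seen in the log.

    Waiters (e.g. code blocked until a VIF is plugged) register an event keyed
    by (instance_uuid, event_name); the external event handler pops and signals
    it under a per-instance lock, mirroring the "<uuid>-events" lock entries.
    """

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        # acquire -> pop -> release, as in the lockutils acquired/released lines
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def handle_external_event(events, instance_uuid, event_name, vm_state, refresh_cache):
    """Dispatch one external event the way the surrounding log entries describe."""
    if event_name.startswith("network-vif-plugged"):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is not None:
            waiter.set()    # wake whoever was waiting for the plug
        else:
            # "No waiting events found dispatching ..." followed by the WARNING
            LOG.warning("Received unexpected event %s for instance %s with vm_state %s",
                        event_name, instance_uuid, vm_state)
    elif event_name.startswith("network-changed"):
        # network-changed always triggers a refresh of the instance's network
        # info cache, matching the "Refreshing network info cache" entries.
        refresh_cache(instance_uuid)
```

In the log, the warning path fires because the spawn has not yet registered a waiter for the plug event; once spawning registers one (prepare_for_event in this sketch), a later plug event would signal it instead of warning.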
[ 937.529155] env[68674]: DEBUG nova.compute.manager [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Received event network-changed-0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.529306] env[68674]: DEBUG nova.compute.manager [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Refreshing instance network info cache due to event network-changed-0aaea2f0-3d87-4947-9cc4-0dffc9eddf95. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 937.529470] env[68674]: DEBUG oslo_concurrency.lockutils [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] Acquiring lock "refresh_cache-33313b29-abaf-4ff7-9182-abfcfb9b3220" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.559730] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.559932] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.560143] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleting the datastore file [datastore2] 7aa58e2f-1202-4252-9c38-ce53084c573f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.560434] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30d61eea-37f2-4bd6-8e76-bd123b1c075f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.565285] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240380, 'name': ReconfigVM_Task, 'duration_secs': 0.325887} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.565854] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701/ffdd1c62-1b4e-40cf-a27e-ff2877439701.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.566452] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-309d05e1-15d7-48a3-95e8-da0cf799262c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.569335] env[68674]: DEBUG oslo_vmware.api [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 937.569335] env[68674]: value = "task-3240383" [ 937.569335] env[68674]: _type = "Task" [ 937.569335] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.573074] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 937.573074] env[68674]: value = "task-3240384" [ 937.573074] env[68674]: _type = "Task" [ 937.573074] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.579184] env[68674]: DEBUG oslo_vmware.api [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240383, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.583716] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240384, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.726599] env[68674]: DEBUG nova.scheduler.client.report [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.740890] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d280f6-2898-f46c-0f51-bf6f454d966e, 'name': SearchDatastore_Task, 'duration_secs': 0.009168} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.742589] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36be4217-a124-4893-9e1c-de77ad13cc33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.747563] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.753283] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 937.753283] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52327bff-04a3-353b-e6df-f23ccae8e3d3" [ 937.753283] env[68674]: _type = "Task" [ 937.753283] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.756922] env[68674]: DEBUG nova.network.neutron [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.765545] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52327bff-04a3-353b-e6df-f23ccae8e3d3, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.765811] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.766906] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f70145c9-4846-42e1-9c1c-de9759097abd/f70145c9-4846-42e1-9c1c-de9759097abd.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.766906] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ae2e2c9-5b6e-451d-a343-e85e5cec8708 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.775107] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 937.775107] env[68674]: value = "task-3240385" [ 937.775107] env[68674]: _type = "Task" [ 937.775107] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.787678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.790755] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240385, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.930531] env[68674]: DEBUG nova.network.neutron [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Updating instance_info_cache with network_info: [{"id": "0aaea2f0-3d87-4947-9cc4-0dffc9eddf95", "address": "fa:16:3e:ab:96:2e", "network": {"id": "20ba7ec2-3985-4822-87c9-232ec424b341", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-911771971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e50ba46714425a9ae403a170fe8fd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aaea2f0-3d", "ovs_interfaceid": "0aaea2f0-3d87-4947-9cc4-0dffc9eddf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.963677] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240381, 'name': RemoveSnapshot_Task} progress is 96%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.083666] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240384, 'name': Rename_Task, 'duration_secs': 0.153821} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.087374] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.088217] env[68674]: DEBUG oslo_vmware.api [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240383, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186929} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.088461] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc73dc2b-d556-4324-a8bf-12ff6975bd88 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.090353] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.090514] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.090722] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.090928] env[68674]: INFO nova.compute.manager [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 938.091210] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.091441] env[68674]: DEBUG nova.compute.manager [-] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.091538] env[68674]: DEBUG nova.network.neutron [-] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.100458] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 938.100458] env[68674]: value = "task-3240386" [ 938.100458] env[68674]: _type = "Task" [ 938.100458] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.111913] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240386, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.232154] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.457s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.234703] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.043s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.234703] env[68674]: DEBUG nova.objects.instance [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lazy-loading 'resources' on Instance uuid 040d7108-8da1-4914-b7fd-03cf09ec68aa {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.271739] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.284901] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475622} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.285677] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] f70145c9-4846-42e1-9c1c-de9759097abd/f70145c9-4846-42e1-9c1c-de9759097abd.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 938.286064] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 938.286337] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b75c6aa-e5ec-407f-a468-598978617350 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.292434] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 938.292434] env[68674]: value = "task-3240387" [ 938.292434] env[68674]: _type = "Task" [ 938.292434] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.302512] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240387, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.433683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Releasing lock "refresh_cache-33313b29-abaf-4ff7-9182-abfcfb9b3220" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.434195] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Instance network_info: |[{"id": "0aaea2f0-3d87-4947-9cc4-0dffc9eddf95", "address": "fa:16:3e:ab:96:2e", "network": {"id": "20ba7ec2-3985-4822-87c9-232ec424b341", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-911771971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e50ba46714425a9ae403a170fe8fd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aaea2f0-3d", "ovs_interfaceid": "0aaea2f0-3d87-4947-9cc4-0dffc9eddf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 938.434533] env[68674]: DEBUG oslo_concurrency.lockutils [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] Acquired lock "refresh_cache-33313b29-abaf-4ff7-9182-abfcfb9b3220" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.434724] env[68674]: DEBUG nova.network.neutron [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Refreshing network info cache for port 0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.435960] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:96:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0aaea2f0-3d87-4947-9cc4-0dffc9eddf95', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.443869] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 
tempest-ServerMetadataTestJSON-693085752-project-member] Creating folder: Project (e0e50ba46714425a9ae403a170fe8fd4). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 938.447014] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f71522c9-907b-4422-83f1-e5f8bbe561c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.458297] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Created folder: Project (e0e50ba46714425a9ae403a170fe8fd4) in parent group-v647377. [ 938.458481] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Creating folder: Instances. Parent ref: group-v647608. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 938.461441] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aac046b-3c53-46e2-be14-ec3fccab0e0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.462933] env[68674]: DEBUG oslo_vmware.api [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240381, 'name': RemoveSnapshot_Task, 'duration_secs': 0.778584} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.463961] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 938.472443] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Created folder: Instances in parent group-v647608. [ 938.472641] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.472853] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.473084] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be7a8212-b763-44b3-92fa-baa2d3ac2dc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.498023] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.498023] env[68674]: value = "task-3240390" [ 938.498023] env[68674]: _type = "Task" [ 938.498023] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.505692] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240390, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.612062] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240386, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.708860] env[68674]: DEBUG nova.network.neutron [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Updated VIF entry in instance network info cache for port 0aaea2f0-3d87-4947-9cc4-0dffc9eddf95. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.709269] env[68674]: DEBUG nova.network.neutron [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Updating instance_info_cache with network_info: [{"id": "0aaea2f0-3d87-4947-9cc4-0dffc9eddf95", "address": "fa:16:3e:ab:96:2e", "network": {"id": "20ba7ec2-3985-4822-87c9-232ec424b341", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-911771971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e50ba46714425a9ae403a170fe8fd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aaea2f0-3d", "ovs_interfaceid": "0aaea2f0-3d87-4947-9cc4-0dffc9eddf95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.744295] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d4c0c725-1daf-4ccd-a3a6-f0764ec5b1ee tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 49.429s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.745178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 25.613s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.745378] 
env[68674]: INFO nova.compute.manager [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Unshelving [ 938.803236] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240387, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068538} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.803506] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.804501] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49032344-5345-4248-afc5-06e41643eaa7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.829662] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] f70145c9-4846-42e1-9c1c-de9759097abd/f70145c9-4846-42e1-9c1c-de9759097abd.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.834242] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1947ccd6-bd7d-4d4b-a16b-1cbc70c5398b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.856063] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 938.856063] env[68674]: value = "task-3240391" [ 938.856063] env[68674]: _type = "Task" [ 938.856063] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.601333] env[68674]: DEBUG nova.network.neutron [-] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.603243] env[68674]: WARNING nova.compute.manager [None req-ff4ddf4c-c76a-4947-81d0-f4289eac2833 tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Image not found during snapshot: nova.exception.ImageNotFound: Image 8c4e52a0-bb40-401b-bb68-e93ee99189b2 could not be found. 
[ 939.604375] env[68674]: DEBUG oslo_concurrency.lockutils [req-f71e7240-7a7a-4453-8330-777876abfee2 req-6fa9273b-7455-4ae2-a3cf-408397e9e078 service nova] Releasing lock "refresh_cache-33313b29-abaf-4ff7-9182-abfcfb9b3220" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.606600] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.617921] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d06bfc2-dda4-4ce4-a1ad-d42dcd69bff1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.621070] env[68674]: DEBUG nova.compute.manager [req-31fbea24-616a-47d9-8fd0-11fccfa9d0b6 req-714b52f5-48bb-4c02-b391-949152459624 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Received event network-vif-deleted-d24d2f1b-cc82-45a9-8d5c-94505a4de39f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.621070] env[68674]: INFO nova.compute.manager [req-31fbea24-616a-47d9-8fd0-11fccfa9d0b6 req-714b52f5-48bb-4c02-b391-949152459624 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Neutron deleted interface d24d2f1b-cc82-45a9-8d5c-94505a4de39f; detaching it from the instance and deleting it from the info cache [ 939.621263] env[68674]: DEBUG nova.network.neutron [req-31fbea24-616a-47d9-8fd0-11fccfa9d0b6 req-714b52f5-48bb-4c02-b391-949152459624 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.626687] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240391, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.626876] env[68674]: WARNING oslo_vmware.common.loopingcall [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] task run outlasted interval by 0.270705 sec [ 939.639746] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240390, 'name': CreateVM_Task, 'duration_secs': 0.359754} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.640087] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 939.640087] env[68674]: value = "task-3240392" [ 939.640087] env[68674]: _type = "Task" [ 939.640087] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.640324] env[68674]: DEBUG oslo_vmware.api [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240386, 'name': PowerOnVM_Task, 'duration_secs': 0.518112} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.643260] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.643626] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.643881] env[68674]: INFO nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Took 8.86 seconds to spawn the instance on the hypervisor. [ 939.644118] env[68674]: DEBUG nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.648898] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.648898] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.648995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 939.652608] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba5a6df-4f6f-4eff-911b-33c6a8cb3fd9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.656492] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240391, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.658956] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdf11834-a2c3-45fa-91a0-9cd4c34f2f6e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.666196] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.668442] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 939.668442] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7df05-8c4c-1521-f904-9aaa1383aea1" [ 939.668442] env[68674]: _type = "Task" [ 939.668442] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.681815] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7df05-8c4c-1521-f904-9aaa1383aea1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.851892] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783b8ef7-4fcd-43be-b8fb-ed4fd8d3ce9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.859186] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20534e3-264b-4a00-9574-dea88b7816fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.891744] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec90fde-5423-4e20-95bb-0905564e72b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.899456] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024443ea-8963-4af1-8d15-fe860c5eadb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.913996] env[68674]: DEBUG nova.compute.provider_tree [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.111123] env[68674]: INFO nova.compute.manager [-] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Took 2.02 seconds to deallocate network for instance. 
[ 940.124717] env[68674]: DEBUG nova.compute.utils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 940.126141] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-71c0a46b-1643-4d4e-b137-f3bfbaf32f0f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.128855] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e684ec31-b5d9-458c-bbba-36ada7f275bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.129619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.129619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "e684ec31-b5d9-458c-bbba-36ada7f275bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.129619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.129820] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.134714] env[68674]: INFO nova.compute.manager [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Terminating instance [ 940.146173] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa274c8b-c84a-4e64-ace2-e38c423b5044 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.157455] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240391, 'name': 
ReconfigVM_Task, 'duration_secs': 1.059941} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.157856] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Reconfigured VM instance instance-00000051 to attach disk [datastore2] f70145c9-4846-42e1-9c1c-de9759097abd/f70145c9-4846-42e1-9c1c-de9759097abd.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.161966] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66cdf58a-acee-423c-a5bb-411028eb4c22 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.168356] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240392, 'name': PowerOffVM_Task, 'duration_secs': 0.462803} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.169460] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.170047] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 940.170047] env[68674]: value = "task-3240393" [ 940.170047] env[68674]: _type = "Task" [ 940.170047] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.170892] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2713c082-31c1-4d70-bcd5-acb254466638 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.210278] env[68674]: DEBUG nova.compute.manager [req-31fbea24-616a-47d9-8fd0-11fccfa9d0b6 req-714b52f5-48bb-4c02-b391-949152459624 service nova] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Detach interface failed, port_id=d24d2f1b-cc82-45a9-8d5c-94505a4de39f, reason: Instance 7aa58e2f-1202-4252-9c38-ce53084c573f could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 940.212996] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facfb2c2-62cc-464f-b8b1-00db40c2d9fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.215754] env[68674]: INFO nova.compute.manager [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Took 35.54 seconds to build instance. 
[ 940.220127] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240393, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.225032] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7df05-8c4c-1521-f904-9aaa1383aea1, 'name': SearchDatastore_Task, 'duration_secs': 0.023407} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.228994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.228994] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.228994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.228994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.228994] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.230495] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1c7b478-8c9d-4136-b79c-92dffda1c65c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.243434] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.243612] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None 
req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.244401] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f23a22df-27cd-4d37-9233-a698a8234c0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.251965] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 940.251965] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d57492-2f27-aff8-12c4-5831b112bfaa" [ 940.251965] env[68674]: _type = "Task" [ 940.251965] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.257020] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.257149] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5e646f3-3069-4dc8-8bab-4feea2bff394 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.264137] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d57492-2f27-aff8-12c4-5831b112bfaa, 'name': SearchDatastore_Task, 'duration_secs': 0.008129} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.265863] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 940.265863] env[68674]: value = "task-3240394" [ 940.265863] env[68674]: _type = "Task" [ 940.265863] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.266069] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31993a13-2080-4d7d-9b8f-d0acacc12c0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.274602] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 940.274602] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c1ef8-6bc3-cb0b-c052-8688b2f96fe7" [ 940.274602] env[68674]: _type = "Task" [ 940.274602] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.281914] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 940.281914] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.281914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.281914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.281914] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.281914] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73740f2e-18e7-42ab-9c74-3f574bb91c54 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.289219] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529c1ef8-6bc3-cb0b-c052-8688b2f96fe7, 'name': SearchDatastore_Task, 'duration_secs': 0.008955} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.290278] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.290523] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 33313b29-abaf-4ff7-9182-abfcfb9b3220/33313b29-abaf-4ff7-9182-abfcfb9b3220.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 940.290780] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.292019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.292019] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcb39b52-8513-41e9-b0ff-b0575b1d0085 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.293469] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-812d987a-3bd6-4992-8bc5-079d0cd08b73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.298686] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 940.298686] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c26b48-2688-1649-6416-71738d303052" [ 940.298686] env[68674]: _type = "Task" [ 940.298686] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.303957] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 940.303957] env[68674]: value = "task-3240395" [ 940.303957] env[68674]: _type = "Task" [ 940.303957] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.310688] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c26b48-2688-1649-6416-71738d303052, 'name': SearchDatastore_Task} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.311815] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feaf9f70-4c27-4d5c-aeec-200057f91a84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.316542] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.319402] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 940.319402] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277bb6c-011f-5a78-7e57-9e08b0076272" [ 940.319402] env[68674]: _type = "Task" [ 940.319402] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.327139] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277bb6c-011f-5a78-7e57-9e08b0076272, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.416889] env[68674]: DEBUG nova.scheduler.client.report [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.618424] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.631563] env[68674]: INFO nova.virt.block_device [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Booting with volume 268d6a73-58d5-4541-bfb5-15e693956d5f at /dev/sdb [ 940.640806] env[68674]: DEBUG nova.compute.manager [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.641398] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.642323] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da141153-5ec4-4a60-aa74-3cd715424a9d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.653519] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.653866] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfc37869-6578-439b-b3ae-43f4de1c8610 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.664348] env[68674]: DEBUG oslo_vmware.api [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 940.664348] env[68674]: value = "task-3240396" [ 940.664348] env[68674]: _type = "Task" [ 940.664348] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.678094] env[68674]: DEBUG oslo_vmware.api [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240396, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.682991] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dc32db5-f827-4f02-a430-4ca978cddac8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.690996] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240393, 'name': Rename_Task, 'duration_secs': 0.147768} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.692321] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.693025] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-941355a3-be44-44d4-a510-fcfeef963663 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.697308] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9530f6-3939-4b03-ab1d-37f5bad55a06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.715816] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 940.715816] env[68674]: value = "task-3240397" [ 940.715816] env[68674]: _type = "Task" [ 940.715816] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.724035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-591b6508-b1e0-40b4-b650-77d2ded40393 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.060s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.724329] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240397, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.735989] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e7c7830-d7cb-4239-94ab-dd79509bedd8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.744649] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3060f8f-499f-45f4-abd3-e0d22b0bbf24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.777023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d24c1b-d472-4b22-b03c-f2cbf4ff4bcb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.782816] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76105750-e701-4370-bf7f-cab21d4a27be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.796120] env[68674]: DEBUG nova.virt.block_device [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating existing volume attachment record: 537fb4ee-1f3e-47c4-9a07-7126b0159a5c {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 940.814557] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.405727} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.814821] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 33313b29-abaf-4ff7-9182-abfcfb9b3220/33313b29-abaf-4ff7-9182-abfcfb9b3220.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.815044] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.815322] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2a02c77-41a6-4327-a205-122d9585327f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.824437] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 940.824437] env[68674]: value = "task-3240398" [ 940.824437] env[68674]: _type = "Task" [ 940.824437] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.831572] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5277bb6c-011f-5a78-7e57-9e08b0076272, 'name': SearchDatastore_Task, 'duration_secs': 0.007956} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.832340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.832616] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. {{(pid=68674) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 940.832894] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e118afcb-2f06-413b-b53a-715c05280f84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.837978] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.843143] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 940.843143] env[68674]: value = "task-3240399" [ 940.843143] env[68674]: _type = "Task" [ 940.843143] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.850300] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240399, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.926264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.692s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.928804] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.596s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.930365] env[68674]: INFO nova.compute.claims [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.949768] env[68674]: INFO nova.scheduler.client.report [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Deleted allocations for instance 040d7108-8da1-4914-b7fd-03cf09ec68aa [ 941.174996] env[68674]: DEBUG oslo_vmware.api [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240396, 'name': PowerOffVM_Task, 'duration_secs': 0.203577} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.174996] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.175163] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.176010] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2694bd29-f374-4bcc-b80a-e24b10f87414 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.226232] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240397, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.242389] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.242588] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.242740] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleting the datastore file [datastore2] e684ec31-b5d9-458c-bbba-36ada7f275bd {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.243088] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20da4677-80c7-45b5-818f-87ce23fd8d36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.250354] env[68674]: DEBUG oslo_vmware.api [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for the task: (returnval){ [ 941.250354] env[68674]: value = "task-3240404" [ 941.250354] env[68674]: _type = "Task" [ 941.250354] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.260553] env[68674]: DEBUG oslo_vmware.api [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240404, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.336710] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061321} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.337041] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.338076] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c68858-9d7a-4032-aedc-a498ca21f6ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.362308] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 33313b29-abaf-4ff7-9182-abfcfb9b3220/33313b29-abaf-4ff7-9182-abfcfb9b3220.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.366226] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a53eae6-8524-45e1-a45a-31b4626006aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.389306] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240399, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.391166] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 941.391166] env[68674]: value = "task-3240405" [ 941.391166] env[68674]: _type = "Task" [ 941.391166] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.436274] env[68674]: DEBUG nova.compute.manager [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 941.458851] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64d603e7-787b-472b-b18f-372662e85bfb tempest-ServerShowV247Test-1663113196 tempest-ServerShowV247Test-1663113196-project-member] Lock "040d7108-8da1-4914-b7fd-03cf09ec68aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.557s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.727285] env[68674]: DEBUG oslo_vmware.api [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240397, 'name': PowerOnVM_Task, 'duration_secs': 0.8061} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.727653] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.727793] env[68674]: INFO nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Took 8.46 seconds to spawn the instance on the hypervisor. [ 941.727974] env[68674]: DEBUG nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 941.729061] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee90c74-d5fa-47d8-ac21-2775de93ff20 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.760613] env[68674]: DEBUG oslo_vmware.api [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Task: {'id': task-3240404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.334831} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.761598] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.761598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.761598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.761598] env[68674]: INFO nova.compute.manager [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Took 1.12 seconds to destroy the instance on the hypervisor. [ 941.761904] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.762117] env[68674]: DEBUG nova.compute.manager [-] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.762213] env[68674]: DEBUG nova.network.neutron [-] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.869291] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240399, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709487} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.869616] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. [ 941.870513] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d430d5-4969-4cba-9000-e212bba9fd85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.906897] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.912283] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1353d9e-47cf-48e6-b783-94fefe3af47b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.929969] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240405, 'name': ReconfigVM_Task, 'duration_secs': 0.324764} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.931324] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 33313b29-abaf-4ff7-9182-abfcfb9b3220/33313b29-abaf-4ff7-9182-abfcfb9b3220.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.932036] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 941.932036] env[68674]: value = "task-3240406" [ 941.932036] env[68674]: _type = "Task" [ 941.932036] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.932234] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd5496d9-ba25-4d15-9546-070d68a7181f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.954903] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240406, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.955303] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 941.955303] env[68674]: value = "task-3240407" [ 941.955303] env[68674]: _type = "Task" [ 941.955303] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.967441] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240407, 'name': Rename_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.975775] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.166607] env[68674]: DEBUG nova.compute.manager [req-b7373dd4-977a-4bcb-8121-8a9eb8332305 req-5f0991aa-25a0-4249-9aad-1de611f96fe2 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Received event network-vif-deleted-f3cfd0df-ecd9-4b39-a765-31cf7a9bf316 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.166607] env[68674]: INFO nova.compute.manager [req-b7373dd4-977a-4bcb-8121-8a9eb8332305 req-5f0991aa-25a0-4249-9aad-1de611f96fe2 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Neutron deleted interface f3cfd0df-ecd9-4b39-a765-31cf7a9bf316; detaching it from the instance and deleting it from the info cache [ 942.166607] env[68674]: DEBUG nova.network.neutron [req-b7373dd4-977a-4bcb-8121-8a9eb8332305 req-5f0991aa-25a0-4249-9aad-1de611f96fe2 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.248112] env[68674]: INFO nova.compute.manager [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Took 35.94 seconds to build instance. 
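Annotation: the entries above keep repeating one oslo.vmware pattern: a *_Task method (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOffVM_Task, ExtendVirtualDisk_Task, ...) is invoked, the returned Task reference is handed to wait_for_task, and _poll_task logs "progress is N%" until the task either completes successfully or errors out. The sketch below is only a simplified illustration of that poll-until-terminal shape, not oslo.vmware's actual implementation; get_task_info(), the state names, and the attributes on the returned object are assumed stand-ins.

    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Illustrative polling loop in the spirit of the wait_for_task /
        # _poll_task calls shown in the log paths above. get_task_info()
        # is a hypothetical helper standing in for however the session
        # exposes the task's TaskInfo; it is not a real oslo.vmware call.
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error_message)
            # queued/running: mirror the "progress is N%" lines in the log
            print("Task %s progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(poll_interval)

The real session-based code also retries transient faults and runs inside a looping call rather than a bare sleep; the point here is only the polling shape visible in the log.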
[ 942.320670] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53ab6b0-781e-4fcc-87dc-002d1e808dc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.328810] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65899e7b-dc77-4fc7-9a44-f57049bf52f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.361352] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee51e49-351d-40f8-bbf5-ec32b0e3f8cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.369557] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f568d5-4e04-4ebd-9fb6-abf90622249e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.384197] env[68674]: DEBUG nova.compute.provider_tree [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.444320] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240406, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.465247] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240407, 'name': Rename_Task, 'duration_secs': 0.430672} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.465527] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.465766] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ae9eff5-9c1d-48bb-bf4b-39df7fd3108c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.472834] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 942.472834] env[68674]: value = "task-3240408" [ 942.472834] env[68674]: _type = "Task" [ 942.472834] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.480603] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.586074] env[68674]: DEBUG nova.network.neutron [-] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.671229] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b20e60e-f8e1-4ba4-9f4d-8aab84fad670 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.680310] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb05168f-81d9-4a1f-983a-9983f279cc7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.711735] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.712011] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.712381] env[68674]: DEBUG nova.objects.instance [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'flavor' on Instance uuid 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.713769] env[68674]: DEBUG nova.compute.manager [req-b7373dd4-977a-4bcb-8121-8a9eb8332305 req-5f0991aa-25a0-4249-9aad-1de611f96fe2 service nova] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Detach interface failed, port_id=f3cfd0df-ecd9-4b39-a765-31cf7a9bf316, reason: Instance e684ec31-b5d9-458c-bbba-36ada7f275bd could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 942.750204] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2afb8007-2de6-4cb8-b071-963eb44ec2c4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.450s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.887607] env[68674]: DEBUG nova.scheduler.client.report [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.946182] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240406, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.982295] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240408, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.090860] env[68674]: INFO nova.compute.manager [-] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Took 1.33 seconds to deallocate network for instance. [ 943.219352] env[68674]: DEBUG nova.objects.instance [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'pci_requests' on Instance uuid 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.395274] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.395274] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 943.399164] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.731s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.402136] env[68674]: INFO nova.compute.claims [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.446718] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240406, 'name': ReconfigVM_Task, 'duration_secs': 1.14014} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.447027] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.447934] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e5325e-c664-40e1-8f81-7ad071148174 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.476572] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fe7ea6b-6e18-4f7a-b149-ef23edd91267 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.495575] env[68674]: DEBUG oslo_vmware.api [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240408, 'name': PowerOnVM_Task, 'duration_secs': 0.65784} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.496906] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.497155] env[68674]: INFO nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Took 7.67 seconds to spawn the instance on the hypervisor. 
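Annotation: the 'Acquiring lock', 'acquired ... waited Ns', and '"released" ... held Ns' lines (around "compute_resources" and the per-instance build locks, for example) come from oslo.concurrency's lockutils wrapper, which times how long a caller waited for and then held a named semaphore and logs both at DEBUG. A minimal sketch of how such a critical section is typically declared follows; update_usage and tracker.instances here are hypothetical illustrations, not Nova's resource tracker.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # Only one caller in this worker enters this section at a time;
        # lockutils' inner wrapper emits the acquire/wait/held debug lines
        # seen in the log above.
        tracker.instances[instance['uuid']] = instance

The same behaviour is available as a context manager (with lockutils.lock('compute_resources'): ...) when the critical section does not map cleanly onto a single function.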
[ 943.497506] env[68674]: DEBUG nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.497668] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 943.497668] env[68674]: value = "task-3240410" [ 943.497668] env[68674]: _type = "Task" [ 943.497668] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.498323] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35cac90-6d61-4f35-b3a7-dc2551ef8410 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.513660] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240410, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.600696] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.721923] env[68674]: DEBUG nova.objects.base [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Object Instance<0e7c5243-ad33-4391-8977-b9019643e3de> lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 943.722180] env[68674]: DEBUG nova.network.neutron [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.837126] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0815b961-6505-472c-a784-d4c5eed69669 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.125s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.910025] env[68674]: DEBUG nova.compute.utils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 943.911115] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 
tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 943.911300] env[68674]: DEBUG nova.network.neutron [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.979824] env[68674]: DEBUG nova.policy [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13d0534e46384c27b10c6984acea9abb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7610acc88b54ded8e6ce52dfa76591d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 944.011666] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240410, 'name': ReconfigVM_Task, 'duration_secs': 0.248348} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.011968] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.013634] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c83f43e-913e-4a3c-b124-b7f067f9abae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.026447] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 944.026447] env[68674]: value = "task-3240411" [ 944.026447] env[68674]: _type = "Task" [ 944.026447] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.026967] env[68674]: INFO nova.compute.manager [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Took 36.38 seconds to build instance. [ 944.038377] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240411, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.354836] env[68674]: DEBUG nova.network.neutron [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Successfully created port: 8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.416198] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 944.533307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eb9cb9d2-dd48-4f9f-b8e4-66e0d8dc829e tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.898s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.537201] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240411, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.734610] env[68674]: DEBUG nova.compute.manager [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Received event network-changed-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.734840] env[68674]: DEBUG nova.compute.manager [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Refreshing instance network info cache due to event network-changed-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 944.735118] env[68674]: DEBUG oslo_concurrency.lockutils [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] Acquiring lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.735264] env[68674]: DEBUG oslo_concurrency.lockutils [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] Acquired lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.735464] env[68674]: DEBUG nova.network.neutron [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Refreshing network info cache for port 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.785024] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9b5418-e514-4bf9-88c7-ba67146705fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.793401] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38565203-3a54-4264-a894-2cda0b37613e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.826269] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca86386-0389-40a5-a641-99b2e55dadc6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.833964] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df33080-6e3a-4b10-be5e-1e10fb7ef1da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.850121] env[68674]: DEBUG nova.compute.provider_tree [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.038065] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240411, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.354170] env[68674]: DEBUG nova.scheduler.client.report [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.426580] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 945.455962] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.456241] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.456398] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.456584] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.456732] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Image pref 0:0:0 
{{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.456881] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.457119] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.457284] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.457799] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.457799] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.457799] env[68674]: DEBUG nova.virt.hardware [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.458715] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fd4b86-9650-4fd0-9ea2-42dc7182490d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.467512] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d48982c-d031-46bf-9304-e1e49789bd78 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.484634] env[68674]: DEBUG nova.network.neutron [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updated VIF entry in instance network info cache for port 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.485057] env[68674]: DEBUG nova.network.neutron [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updating instance_info_cache with network_info: [{"id": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "address": "fa:16:3e:52:b1:2d", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea7b81b-2d", "ovs_interfaceid": "7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.542776] env[68674]: DEBUG oslo_vmware.api [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240411, 'name': PowerOnVM_Task, 'duration_secs': 1.458049} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.543259] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.547342] env[68674]: DEBUG nova.compute.manager [None req-c878d173-99b6-4c06-82a2-343f0f66aa86 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.548642] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83e8cb0-20f6-4f4c-9cbe-58dc7ef9b694 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.860792] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.861455] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.863998] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.864261] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.864578] env[68674]: DEBUG nova.objects.instance [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'flavor' on Instance uuid 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.866223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.764s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.866431] env[68674]: DEBUG nova.objects.instance [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lazy-loading 'resources' on Instance uuid 0eaf7d72-755b-4977-8f71-7d53ad1cf573 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.928914] env[68674]: DEBUG nova.network.neutron [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Successfully updated port: 8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.987765] env[68674]: DEBUG oslo_concurrency.lockutils [req-3f07a302-0ec8-4392-abdb-e624d66fcd97 req-1be220d4-5984-49e4-ba81-441a00faceb2 service nova] Releasing lock "refresh_cache-f70145c9-4846-42e1-9c1c-de9759097abd" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.367771] env[68674]: DEBUG nova.compute.utils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.369196] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.369374] env[68674]: DEBUG nova.network.neutron [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.403980] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.430967] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "refresh_cache-082fd3a5-b30e-41cc-8fba-dab2802a1e3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.431193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquired lock "refresh_cache-082fd3a5-b30e-41cc-8fba-dab2802a1e3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.431377] env[68674]: DEBUG nova.network.neutron [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.436297] env[68674]: DEBUG nova.policy [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcea4f18771342a8b9065cc5ace886f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ad20d6acb5b40b9a13ec33b1a1a6758', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.491569] env[68674]: DEBUG nova.objects.instance [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'pci_requests' on Instance uuid 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.720268] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886dd1b4-1fe0-44f2-b883-6d2143bd1bf6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.729221] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a65c0106-b71b-4363-bdc9-ba86c237cbff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.763014] env[68674]: DEBUG nova.network.neutron [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Successfully created port: 68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.765477] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0487988c-5a43-4535-8d3d-79b96b042179 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.774701] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9bc881-8a63-4ed9-95a0-28a571c9a2ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.787755] env[68674]: DEBUG nova.compute.provider_tree [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.875240] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.997168] env[68674]: DEBUG nova.objects.base [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Object Instance<0e7c5243-ad33-4391-8977-b9019643e3de> lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 946.997412] env[68674]: DEBUG nova.network.neutron [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 947.029056] env[68674]: DEBUG nova.compute.manager [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Received event network-vif-plugged-8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.029286] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] Acquiring lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.029489] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.029653] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.029817] env[68674]: DEBUG nova.compute.manager [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] No waiting events found dispatching network-vif-plugged-8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.029980] env[68674]: WARNING nova.compute.manager [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Received unexpected event network-vif-plugged-8ede585c-c152-4f0a-8740-215268e36b27 for instance with vm_state building and task_state spawning. [ 947.030150] env[68674]: DEBUG nova.compute.manager [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Received event network-changed-8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.030310] env[68674]: DEBUG nova.compute.manager [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Refreshing instance network info cache due to event network-changed-8ede585c-c152-4f0a-8740-215268e36b27. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 947.030451] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] Acquiring lock "refresh_cache-082fd3a5-b30e-41cc-8fba-dab2802a1e3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.103245] env[68674]: DEBUG nova.network.neutron [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 947.139382] env[68674]: DEBUG nova.policy [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 947.292023] env[68674]: DEBUG nova.scheduler.client.report [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.401020] env[68674]: DEBUG nova.network.neutron [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Updating instance_info_cache with network_info: [{"id": "8ede585c-c152-4f0a-8740-215268e36b27", "address": "fa:16:3e:21:bf:34", "network": {"id": "a5dbc02e-9db7-422c-af9d-980b23766ae6", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-614683180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7610acc88b54ded8e6ce52dfa76591d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ede585c-c1", "ovs_interfaceid": "8ede585c-c152-4f0a-8740-215268e36b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.637424] env[68674]: DEBUG nova.network.neutron [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Successfully created port: 861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 947.799052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.801798] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.517s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.803372] env[68674]: INFO nova.compute.claims [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.823745] env[68674]: INFO nova.scheduler.client.report [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Deleted allocations for instance 0eaf7d72-755b-4977-8f71-7d53ad1cf573 [ 947.889618] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.906881] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Releasing lock "refresh_cache-082fd3a5-b30e-41cc-8fba-dab2802a1e3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.908330] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Instance network_info: |[{"id": "8ede585c-c152-4f0a-8740-215268e36b27", "address": "fa:16:3e:21:bf:34", "network": {"id": "a5dbc02e-9db7-422c-af9d-980b23766ae6", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-614683180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7610acc88b54ded8e6ce52dfa76591d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ede585c-c1", "ovs_interfaceid": 
"8ede585c-c152-4f0a-8740-215268e36b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.910946] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] Acquired lock "refresh_cache-082fd3a5-b30e-41cc-8fba-dab2802a1e3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.910946] env[68674]: DEBUG nova.network.neutron [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Refreshing network info cache for port 8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.912497] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:bf:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1cf14cf-4f9c-41af-90d0-62e363eb4fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ede585c-c152-4f0a-8740-215268e36b27', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.920866] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Creating folder: Project (e7610acc88b54ded8e6ce52dfa76591d). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.924345] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e8f0288-a659-4e7a-b24e-1d26f18528c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.930189] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.931039] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.931039] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.931173] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.931285] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.931510] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.931827] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.932167] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.932449] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.933057] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.933677] env[68674]: DEBUG nova.virt.hardware [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.936096] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fc576d-febd-4c4b-9d16-b9e7faa7a830 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.946901] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b6e298-8a94-439c-9b83-b98caa707da5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.962172] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Created folder: Project (e7610acc88b54ded8e6ce52dfa76591d) in parent group-v647377. [ 947.962382] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Creating folder: Instances. Parent ref: group-v647613. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 947.966172] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-224ff2d9-fadd-4a24-b8e9-e55d06a80090 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.976021] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Created folder: Instances in parent group-v647613. [ 947.976021] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.976021] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.976021] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e366e703-8202-4de3-8660-80b39c344803 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.994619] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.994619] env[68674]: value = "task-3240414" [ 947.994619] env[68674]: _type = "Task" [ 947.994619] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.003903] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240414, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.054874] env[68674]: INFO nova.compute.manager [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Unrescuing [ 948.055845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.055845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.055845] env[68674]: DEBUG nova.network.neutron [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.136890] env[68674]: DEBUG nova.network.neutron [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Updated VIF entry in instance network info cache for port 8ede585c-c152-4f0a-8740-215268e36b27. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.137330] env[68674]: DEBUG nova.network.neutron [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Updating instance_info_cache with network_info: [{"id": "8ede585c-c152-4f0a-8740-215268e36b27", "address": "fa:16:3e:21:bf:34", "network": {"id": "a5dbc02e-9db7-422c-af9d-980b23766ae6", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-614683180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7610acc88b54ded8e6ce52dfa76591d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ede585c-c1", "ovs_interfaceid": "8ede585c-c152-4f0a-8740-215268e36b27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.332430] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9d8243b3-71eb-4c55-b7cf-b3c18483eb3d tempest-ServersTestJSON-808914722 tempest-ServersTestJSON-808914722-project-member] Lock "0eaf7d72-755b-4977-8f71-7d53ad1cf573" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.196s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.506130] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240414, 'name': CreateVM_Task, 'duration_secs': 0.403921} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.506337] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.507044] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.507216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.507598] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.507866] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16cbdef4-1bea-4756-855b-3db86234b95d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.512632] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 948.512632] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5294fd8d-516c-6180-8e47-6a10eac7f491" [ 948.512632] env[68674]: _type = "Task" [ 948.512632] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.521248] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5294fd8d-516c-6180-8e47-6a10eac7f491, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.641119] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6371637-39eb-4dda-9940-d0e776bd46b1 req-e117ee96-246d-45f9-a061-6fba157e7327 service nova] Releasing lock "refresh_cache-082fd3a5-b30e-41cc-8fba-dab2802a1e3e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.916816] env[68674]: DEBUG nova.network.neutron [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.987521] env[68674]: DEBUG nova.network.neutron [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Successfully updated port: 68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.032816] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5294fd8d-516c-6180-8e47-6a10eac7f491, 'name': SearchDatastore_Task, 'duration_secs': 0.013681} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.033327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.033434] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.033620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.033760] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.033939] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.034218] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e31c1876-94f8-49c6-aac3-f6ab7b7c2bd1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.043568] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.044202] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.044549] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-961b4b48-fe80-4f7c-afe7-33fd7b8a237c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.050977] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 949.050977] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b6638c-bbd2-8bb6-ca82-8724d0506d84" [ 949.050977] env[68674]: _type = "Task" [ 949.050977] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.065319] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b6638c-bbd2-8bb6-ca82-8724d0506d84, 'name': SearchDatastore_Task, 'duration_secs': 0.009787} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.066231] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68065db8-040a-4804-8073-67a8cae58fc7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.075525] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 949.075525] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52386785-3dea-08bb-c3fb-41f31a0238b5" [ 949.075525] env[68674]: _type = "Task" [ 949.075525] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.088040] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52386785-3dea-08bb-c3fb-41f31a0238b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.168940] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa1e050-8db4-4ec6-8046-48ab698ec919 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.176068] env[68674]: DEBUG nova.network.neutron [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Successfully updated port: 861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.177905] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4623c0-b282-4a03-8f68-a23fc922cc48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.211129] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddb144e-cd19-4d30-b7fe-e8b7d82ba152 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.219696] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da897521-bab0-42ad-82fe-070573bd299c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.234150] env[68674]: DEBUG nova.compute.provider_tree [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.319816] env[68674]: DEBUG nova.compute.manager [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Received event network-vif-plugged-68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.319917] env[68674]: DEBUG oslo_concurrency.lockutils [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] Acquiring lock "e894cd36-95c8-473b-9bbd-483f11fb5add-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.320151] env[68674]: DEBUG oslo_concurrency.lockutils [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.320320] env[68674]: DEBUG oslo_concurrency.lockutils [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.320494] 
env[68674]: DEBUG nova.compute.manager [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] No waiting events found dispatching network-vif-plugged-68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 949.320661] env[68674]: WARNING nova.compute.manager [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Received unexpected event network-vif-plugged-68f46874-a1b2-4e2f-ab13-1dd822565a4e for instance with vm_state building and task_state spawning. [ 949.320823] env[68674]: DEBUG nova.compute.manager [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Received event network-changed-68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.320975] env[68674]: DEBUG nova.compute.manager [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Refreshing instance network info cache due to event network-changed-68f46874-a1b2-4e2f-ab13-1dd822565a4e. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 949.321173] env[68674]: DEBUG oslo_concurrency.lockutils [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] Acquiring lock "refresh_cache-e894cd36-95c8-473b-9bbd-483f11fb5add" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.321310] env[68674]: DEBUG oslo_concurrency.lockutils [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] Acquired lock "refresh_cache-e894cd36-95c8-473b-9bbd-483f11fb5add" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.321466] env[68674]: DEBUG nova.network.neutron [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Refreshing network info cache for port 68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.419707] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.420689] env[68674]: DEBUG nova.objects.instance [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'flavor' on Instance uuid 7d953e59-53c1-4041-a641-35c12c012f7e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.489982] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "refresh_cache-e894cd36-95c8-473b-9bbd-483f11fb5add" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.588790] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52386785-3dea-08bb-c3fb-41f31a0238b5, 'name': SearchDatastore_Task, 'duration_secs': 0.012196} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.589025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.589287] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 082fd3a5-b30e-41cc-8fba-dab2802a1e3e/082fd3a5-b30e-41cc-8fba-dab2802a1e3e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.589543] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dcdae29b-dbfe-4be3-a85b-f984d58733e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.597335] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 949.597335] env[68674]: value = "task-3240415" [ 949.597335] env[68674]: _type = "Task" [ 949.597335] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.605522] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240415, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.685064] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.685064] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.685064] env[68674]: DEBUG nova.network.neutron [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.737575] env[68674]: DEBUG nova.scheduler.client.report [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.793020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "33313b29-abaf-4ff7-9182-abfcfb9b3220" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.793020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.793020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "33313b29-abaf-4ff7-9182-abfcfb9b3220-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.793020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 
tempest-ServerMetadataTestJSON-693085752-project-member] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.793020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.794866] env[68674]: INFO nova.compute.manager [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Terminating instance [ 949.867213] env[68674]: DEBUG nova.network.neutron [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.927693] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e1113b-5dcd-4d0d-8825-d8aa69c231c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.958623] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.961627] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9c5c072-89dd-4945-8b05-54563439a21b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.969399] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 949.969399] env[68674]: value = "task-3240416" [ 949.969399] env[68674]: _type = "Task" [ 949.969399] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.985625] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240416, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.031936] env[68674]: DEBUG nova.network.neutron [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.113272] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240415, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.243909] env[68674]: WARNING nova.network.neutron [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] a803f1d7-ea36-4d0a-9a85-9b7a8d27f698 already exists in list: networks containing: ['a803f1d7-ea36-4d0a-9a85-9b7a8d27f698']. ignoring it [ 950.246744] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.247306] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 950.249875] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.461s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.250115] env[68674]: DEBUG nova.objects.instance [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lazy-loading 'resources' on Instance uuid c4fd04a7-2b11-4c4b-84d1-53edc1e3f035 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.302169] env[68674]: DEBUG nova.compute.manager [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.302393] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.304016] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc0f95c-9929-4356-bfbc-34eac74458a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.312428] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.312683] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-064e7952-3676-4341-8c3c-1f5c17be706e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.320021] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 950.320021] env[68674]: value = "task-3240417" [ 950.320021] env[68674]: _type = "Task" [ 950.320021] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.332937] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.483090] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240416, 'name': PowerOffVM_Task, 'duration_secs': 0.448118} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.483418] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.490253] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfiguring VM instance instance-00000048 to detach disk 2002 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 950.490617] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee9ed060-b14d-4cc6-a4fc-4df238e0f66f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.509551] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 950.509551] env[68674]: value = "task-3240418" [ 950.509551] env[68674]: _type = "Task" [ 950.509551] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.524725] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240418, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.534574] env[68674]: DEBUG oslo_concurrency.lockutils [req-fb8ce5bb-f0d7-49d2-be92-c07aafec5671 req-de5b8763-bc14-4359-b1e8-378045bfda05 service nova] Releasing lock "refresh_cache-e894cd36-95c8-473b-9bbd-483f11fb5add" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.534926] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquired lock "refresh_cache-e894cd36-95c8-473b-9bbd-483f11fb5add" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.536416] env[68674]: DEBUG nova.network.neutron [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.612144] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240415, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643373} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.612144] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 082fd3a5-b30e-41cc-8fba-dab2802a1e3e/082fd3a5-b30e-41cc-8fba-dab2802a1e3e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.612144] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.612144] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef582746-a702-4d2d-b29c-9c1f6692714f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.617499] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 950.617499] env[68674]: value = "task-3240419" [ 950.617499] env[68674]: _type = "Task" [ 950.617499] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.626329] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240419, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.679657] env[68674]: DEBUG nova.network.neutron [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "861f9feb-a46d-4b29-851a-f958bd80ef86", "address": "fa:16:3e:a7:8f:46", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861f9feb-a4", "ovs_interfaceid": "861f9feb-a46d-4b29-851a-f958bd80ef86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.757259] env[68674]: DEBUG nova.compute.utils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 950.758658] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 950.758843] env[68674]: DEBUG nova.network.neutron [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.812877] env[68674]: DEBUG nova.policy [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f940b7cc23c64aacb65c919815ca8eb7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '973b07f90402492594d128e1e1d2e915', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 950.839451] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240417, 'name': PowerOffVM_Task, 'duration_secs': 0.200197} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.839755] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.839887] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.840150] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bfe4dad-8ffd-4567-b030-68c3a34cbc4a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.908136] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.908295] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.908620] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Deleting the datastore file [datastore2] 
33313b29-abaf-4ff7-9182-abfcfb9b3220 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.908710] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbde7a6e-8a89-4940-a5c0-9a4b18b03f68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.916357] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for the task: (returnval){ [ 950.916357] env[68674]: value = "task-3240421" [ 950.916357] env[68674]: _type = "Task" [ 950.916357] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.928638] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.022829] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240418, 'name': ReconfigVM_Task, 'duration_secs': 0.288943} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.023136] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfigured VM instance instance-00000048 to detach disk 2002 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 951.023357] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.023607] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8b32deb-fe8c-4703-8054-6a430382e29b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.029987] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 951.029987] env[68674]: value = "task-3240422" [ 951.029987] env[68674]: _type = "Task" [ 951.029987] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.039865] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240422, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.083511] env[68674]: DEBUG nova.network.neutron [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 951.101734] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce67eb4-59da-4d13-9380-3dd25363d418 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.108771] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa11be03-a53a-44a8-a59e-e27e93c604e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.157986] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acaf30de-1241-4a8b-89d6-4b9d1c1681d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.168651] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc5800b-3e58-41de-8f58-bbe3ec98837d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.176046] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082658} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.176510] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.177676] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45152371-af07-4dee-9c1c-c419c7ede353 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.187942] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.188564] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.188721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.189184] env[68674]: DEBUG nova.compute.provider_tree [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.192053] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46654483-ee88-4377-a712-c73a85a03aec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.211831] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 082fd3a5-b30e-41cc-8fba-dab2802a1e3e/082fd3a5-b30e-41cc-8fba-dab2802a1e3e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.215773] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d7f3c3b-1852-49a6-966c-4f19087027f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.231267] env[68674]: DEBUG nova.network.neutron [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 
tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Successfully created port: 06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.246039] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.246039] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.246039] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.246039] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.246039] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 951.246039] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.246287] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.246415] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.246582] env[68674]: DEBUG nova.virt.hardware [None 
req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.246746] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.246981] env[68674]: DEBUG nova.virt.hardware [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.254926] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfiguring VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 951.254926] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8ad4bba-001c-447b-9012-fac9b5db4b7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.269098] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 951.272236] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 951.272236] env[68674]: value = "task-3240423" [ 951.272236] env[68674]: _type = "Task" [ 951.272236] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.277591] env[68674]: DEBUG oslo_vmware.api [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 951.277591] env[68674]: value = "task-3240424" [ 951.277591] env[68674]: _type = "Task" [ 951.277591] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.284190] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240423, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.288895] env[68674]: DEBUG oslo_vmware.api [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240424, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.351347] env[68674]: DEBUG nova.network.neutron [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Updating instance_info_cache with network_info: [{"id": "68f46874-a1b2-4e2f-ab13-1dd822565a4e", "address": "fa:16:3e:76:02:d7", "network": {"id": "2fe936d1-11af-436b-b09f-487e47a2a601", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1998689030-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ad20d6acb5b40b9a13ec33b1a1a6758", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68f46874-a1", "ovs_interfaceid": "68f46874-a1b2-4e2f-ab13-1dd822565a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.429502] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.543320] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240422, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.587331] env[68674]: DEBUG nova.compute.manager [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-vif-plugged-861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.587580] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.587764] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.587988] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.588113] env[68674]: DEBUG nova.compute.manager [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] No waiting events found dispatching network-vif-plugged-861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 951.588305] env[68674]: WARNING nova.compute.manager [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received unexpected event network-vif-plugged-861f9feb-a46d-4b29-851a-f958bd80ef86 for instance with vm_state active and task_state None. [ 951.588443] env[68674]: DEBUG nova.compute.manager [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-changed-861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.588573] env[68674]: DEBUG nova.compute.manager [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing instance network info cache due to event network-changed-861f9feb-a46d-4b29-851a-f958bd80ef86. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 951.588748] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.588907] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.589196] env[68674]: DEBUG nova.network.neutron [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing network info cache for port 861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.694061] env[68674]: DEBUG nova.scheduler.client.report [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.789298] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240423, 'name': ReconfigVM_Task, 'duration_secs': 0.333601} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.792500] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 082fd3a5-b30e-41cc-8fba-dab2802a1e3e/082fd3a5-b30e-41cc-8fba-dab2802a1e3e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.793069] env[68674]: DEBUG oslo_vmware.api [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240424, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.793283] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26257c78-17e5-40cd-a50b-f76b24e581a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.800296] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 951.800296] env[68674]: value = "task-3240425" [ 951.800296] env[68674]: _type = "Task" [ 951.800296] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.808148] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240425, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.855273] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Releasing lock "refresh_cache-e894cd36-95c8-473b-9bbd-483f11fb5add" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.855273] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Instance network_info: |[{"id": "68f46874-a1b2-4e2f-ab13-1dd822565a4e", "address": "fa:16:3e:76:02:d7", "network": {"id": "2fe936d1-11af-436b-b09f-487e47a2a601", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1998689030-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ad20d6acb5b40b9a13ec33b1a1a6758", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68f46874-a1", "ovs_interfaceid": "68f46874-a1b2-4e2f-ab13-1dd822565a4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 951.855605] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:02:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68f46874-a1b2-4e2f-ab13-1dd822565a4e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.863324] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Creating folder: Project (3ad20d6acb5b40b9a13ec33b1a1a6758). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 951.863597] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31b33e9e-a151-4339-8db9-c35e51a18b42 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.874561] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Created folder: Project (3ad20d6acb5b40b9a13ec33b1a1a6758) in parent group-v647377. [ 951.874969] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Creating folder: Instances. Parent ref: group-v647616. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 951.875106] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a449d44-f1b3-453c-9823-2dca246040b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.883589] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Created folder: Instances in parent group-v647616. [ 951.883869] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.884253] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 951.884366] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4f36d1e-8e2e-40f6-9a3b-b5c5dad89031 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.904656] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.904656] env[68674]: value = "task-3240428" [ 951.904656] env[68674]: _type = "Task" [ 951.904656] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.911985] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240428, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.929069] env[68674]: DEBUG oslo_vmware.api [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Task: {'id': task-3240421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.522967} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.929407] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.929563] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.929752] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.929945] env[68674]: INFO nova.compute.manager [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Took 1.63 seconds to destroy the instance on the hypervisor. [ 951.930230] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.930563] env[68674]: DEBUG nova.compute.manager [-] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.930563] env[68674]: DEBUG nova.network.neutron [-] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 952.041879] env[68674]: DEBUG oslo_vmware.api [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240422, 'name': PowerOnVM_Task, 'duration_secs': 0.587382} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.042333] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.042623] env[68674]: DEBUG nova.compute.manager [None req-bc9247be-087b-4c61-956c-4ac9fb1fa0ad tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.043585] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fb8c62-78fa-4d32-a76a-bd6308190484 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.198269] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.948s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.207025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.407s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.207025] env[68674]: INFO nova.compute.claims [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.237643] env[68674]: INFO nova.scheduler.client.report [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Deleted allocations for instance c4fd04a7-2b11-4c4b-84d1-53edc1e3f035 [ 952.281474] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 952.307019] env[68674]: DEBUG oslo_vmware.api [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240424, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.319737] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240425, 'name': Rename_Task, 'duration_secs': 0.141419} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.320574] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.320574] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-374abe65-7aa2-4335-93a8-504c185bd132 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None 
req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 952.327018] env[68674]: DEBUG nova.virt.hardware [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 952.327018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef67f92d-fdc1-42c7-8c97-33f216767dc2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.335326] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a005197-0533-4207-aa70-86390744986f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.339362] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 952.339362] env[68674]: value = "task-3240429" [ 952.339362] env[68674]: _type = "Task" [ 952.339362] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.362021] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240429, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.399430] env[68674]: DEBUG nova.network.neutron [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updated VIF entry in instance network info cache for port 861f9feb-a46d-4b29-851a-f958bd80ef86. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.399430] env[68674]: DEBUG nova.network.neutron [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "861f9feb-a46d-4b29-851a-f958bd80ef86", "address": "fa:16:3e:a7:8f:46", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861f9feb-a4", "ovs_interfaceid": "861f9feb-a46d-4b29-851a-f958bd80ef86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.416428] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240428, 'name': CreateVM_Task, 'duration_secs': 0.370284} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.416599] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.417267] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.417432] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.417771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 952.418034] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cebedd43-4c42-45ec-9ac8-503a74a7cca6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.422766] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 952.422766] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5242b8c6-a1e3-15c4-604e-4abe2e40e23b" [ 952.422766] env[68674]: _type = "Task" [ 952.422766] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.431103] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5242b8c6-a1e3-15c4-604e-4abe2e40e23b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.722016] env[68674]: DEBUG nova.network.neutron [-] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.748321] env[68674]: DEBUG oslo_concurrency.lockutils [None req-78c4d3df-0db8-4e17-bfc5-df75502c4891 tempest-ServersWithSpecificFlavorTestJSON-281257334 tempest-ServersWithSpecificFlavorTestJSON-281257334-project-member] Lock "c4fd04a7-2b11-4c4b-84d1-53edc1e3f035" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.629s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.794467] env[68674]: DEBUG oslo_vmware.api [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240424, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.799622] env[68674]: DEBUG nova.network.neutron [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Successfully updated port: 06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.849548] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240429, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.902254] env[68674]: DEBUG oslo_concurrency.lockutils [req-d69d3d4d-0d47-435e-83f2-121bc4074a04 req-f6d5eefd-0fa4-41e9-aaa4-9ba443bce1fe service nova] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.934659] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5242b8c6-a1e3-15c4-604e-4abe2e40e23b, 'name': SearchDatastore_Task, 'duration_secs': 0.011513} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.934978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.935186] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.935477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.935631] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.935814] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.936102] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b783f0fd-2145-4d8d-b100-c5749f53d85a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.949995] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.949995] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.950752] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e33d9f85-829a-4fea-88ca-39b356f21717 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.959936] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 952.959936] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52959021-f74f-bdd6-316d-9197acdae4b8" [ 952.959936] env[68674]: _type = "Task" [ 952.959936] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.971298] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52959021-f74f-bdd6-316d-9197acdae4b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.227350] env[68674]: INFO nova.compute.manager [-] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Took 1.30 seconds to deallocate network for instance. [ 953.297727] env[68674]: DEBUG oslo_vmware.api [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240424, 'name': ReconfigVM_Task, 'duration_secs': 1.773761} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.298281] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.298537] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfigured VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 953.307833] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "refresh_cache-a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.307833] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquired lock "refresh_cache-a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.307833] env[68674]: DEBUG nova.network.neutron [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 
tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.353809] env[68674]: DEBUG oslo_vmware.api [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240429, 'name': PowerOnVM_Task, 'duration_secs': 0.720134} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.356018] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.356018] env[68674]: INFO nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Took 7.93 seconds to spawn the instance on the hypervisor. [ 953.356018] env[68674]: DEBUG nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.356851] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8072f4-4943-478b-9bc6-ebdcd3ed94a1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.478025] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52959021-f74f-bdd6-316d-9197acdae4b8, 'name': SearchDatastore_Task, 'duration_secs': 0.019785} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.478025] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd51633b-2c39-4cd9-8789-8e58c2ec4562 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.483753] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 953.483753] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52099c93-b529-588f-bee8-2e6bc794a68e" [ 953.483753] env[68674]: _type = "Task" [ 953.483753] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.491744] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52099c93-b529-588f-bee8-2e6bc794a68e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.567604] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e93cba-54a0-4744-ab52-5a95438396ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.575829] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef44f019-03f0-40f6-8363-ef37ff8eecde {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.610967] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5183681b-9060-4510-8904-f9d7eed98ec3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.618487] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd18f06-fe5c-43d4-a7e1-bf917ce4dd36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.633158] env[68674]: DEBUG nova.compute.provider_tree [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.738309] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.811027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-318d53dc-80d6-4fa4-9ff2-2f4f352e2023 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.943s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.848886] env[68674]: DEBUG nova.network.neutron [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.876811] env[68674]: INFO nova.compute.manager [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Took 42.56 seconds to build instance. 
[ 953.981352] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.981648] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.002573] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52099c93-b529-588f-bee8-2e6bc794a68e, 'name': SearchDatastore_Task, 'duration_secs': 0.01119} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.002894] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.003640] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] e894cd36-95c8-473b-9bbd-483f11fb5add/e894cd36-95c8-473b-9bbd-483f11fb5add.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 954.003640] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92702ae7-f332-426c-ab97-8b174bc858d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.011251] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 954.011251] env[68674]: value = "task-3240430" [ 954.011251] env[68674]: _type = "Task" [ 954.011251] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.020155] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240430, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.021302] env[68674]: DEBUG nova.network.neutron [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Updating instance_info_cache with network_info: [{"id": "06217b92-0ccd-4eaf-be24-4bbd6e81f3a3", "address": "fa:16:3e:57:71:85", "network": {"id": "d4cab034-a246-4269-aae2-2107a2d0707d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-75215434-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "973b07f90402492594d128e1e1d2e915", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06217b92-0c", "ovs_interfaceid": "06217b92-0ccd-4eaf-be24-4bbd6e81f3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.136361] env[68674]: DEBUG nova.scheduler.client.report [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.356709] env[68674]: DEBUG nova.compute.manager [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Received event network-vif-deleted-0aaea2f0-3d87-4947-9cc4-0dffc9eddf95 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.357118] env[68674]: DEBUG nova.compute.manager [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Received event network-vif-plugged-06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.357450] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] Acquiring lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.357827] env[68674]: DEBUG 
oslo_concurrency.lockutils [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.358233] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.358449] env[68674]: DEBUG nova.compute.manager [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] No waiting events found dispatching network-vif-plugged-06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 954.358751] env[68674]: WARNING nova.compute.manager [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Received unexpected event network-vif-plugged-06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 for instance with vm_state building and task_state spawning. [ 954.359065] env[68674]: DEBUG nova.compute.manager [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Received event network-changed-06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.359345] env[68674]: DEBUG nova.compute.manager [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Refreshing instance network info cache due to event network-changed-06217b92-0ccd-4eaf-be24-4bbd6e81f3a3. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 954.359652] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] Acquiring lock "refresh_cache-a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.379918] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dc26ea66-f90d-4f4c-a4cb-1163604d695a tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.078s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.492078] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.492078] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.492078] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.492078] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.492078] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.492078] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.492818] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 954.494179] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.528997] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Releasing lock "refresh_cache-a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.529857] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Instance network_info: |[{"id": "06217b92-0ccd-4eaf-be24-4bbd6e81f3a3", "address": "fa:16:3e:57:71:85", "network": {"id": "d4cab034-a246-4269-aae2-2107a2d0707d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-75215434-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "973b07f90402492594d128e1e1d2e915", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06217b92-0c", "ovs_interfaceid": "06217b92-0ccd-4eaf-be24-4bbd6e81f3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 954.530578] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240430, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.531867] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] Acquired lock "refresh_cache-a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.532036] env[68674]: DEBUG nova.network.neutron [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Refreshing network info cache for port 06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.533514] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:71:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06217b92-0ccd-4eaf-be24-4bbd6e81f3a3', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.543417] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Creating folder: Project (973b07f90402492594d128e1e1d2e915). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.544255] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6faaa16-115b-4468-afde-1734bf0bf221 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.556866] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Created folder: Project (973b07f90402492594d128e1e1d2e915) in parent group-v647377. [ 954.558014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Creating folder: Instances. Parent ref: group-v647619. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.558014] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48524b54-e60d-43d7-b2e3-9e3e53cc813a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.567665] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Created folder: Instances in parent group-v647619. [ 954.567942] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.568178] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.568393] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78d782fe-b7e2-435f-8342-0eac5c6ccbda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.591451] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.591451] env[68674]: value = "task-3240433" [ 954.591451] env[68674]: _type = "Task" [ 954.591451] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.603727] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240433, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.642761] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.645428] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 954.647057] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.950s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.647057] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.649620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.814s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.649620] env[68674]: DEBUG nova.objects.instance [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 954.685556] env[68674]: INFO nova.scheduler.client.report [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted allocations for instance 3d85c8c4-f09c-4f75-aff5-9a49d84ae006 [ 954.997644] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.032454] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240430, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.106470] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240433, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.155619] env[68674]: DEBUG nova.compute.utils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.159954] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 955.161157] env[68674]: DEBUG nova.network.neutron [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.198255] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8f0073ce-00c0-41ea-9917-5642b4bff7c0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "3d85c8c4-f09c-4f75-aff5-9a49d84ae006" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.368s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.532077] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240430, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.573098] env[68674]: DEBUG nova.policy [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.605537] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240433, 'name': CreateVM_Task, 'duration_secs': 0.748273} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.607844] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.607844] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.607844] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.608216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 955.609059] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc78b314-25e8-4492-ba32-3c12df0c533a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.616334] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 955.616334] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52153d8c-af58-16ff-58e9-06f7f15932dc" [ 955.616334] env[68674]: _type = "Task" [ 955.616334] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.629227] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52153d8c-af58-16ff-58e9-06f7f15932dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.660715] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.664730] env[68674]: DEBUG oslo_concurrency.lockutils [None req-205f35e2-5d6a-4ad5-ba5c-3a4031837137 tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.669008] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.973s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.669296] env[68674]: DEBUG nova.objects.instance [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lazy-loading 'resources' on Instance uuid 5e3f667c-5d3a-4465-9186-779563087480 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.824436] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.824715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.825130] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.825223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.825355] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.827482] env[68674]: INFO nova.compute.manager [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Terminating instance [ 955.988355] env[68674]: DEBUG nova.network.neutron [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Updated VIF entry in instance network info cache for port 06217b92-0ccd-4eaf-be24-4bbd6e81f3a3. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.988733] env[68674]: DEBUG nova.network.neutron [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Updating instance_info_cache with network_info: [{"id": "06217b92-0ccd-4eaf-be24-4bbd6e81f3a3", "address": "fa:16:3e:57:71:85", "network": {"id": "d4cab034-a246-4269-aae2-2107a2d0707d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-75215434-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "973b07f90402492594d128e1e1d2e915", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06217b92-0c", "ovs_interfaceid": "06217b92-0ccd-4eaf-be24-4bbd6e81f3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.029261] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240430, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.52893} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.029602] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] e894cd36-95c8-473b-9bbd-483f11fb5add/e894cd36-95c8-473b-9bbd-483f11fb5add.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.030726] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.030726] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec60ab4e-0387-4007-9a83-2b64255a6920 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.036913] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 956.036913] env[68674]: value = "task-3240434" [ 956.036913] env[68674]: _type = "Task" [ 956.036913] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.048055] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240434, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.135193] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52153d8c-af58-16ff-58e9-06f7f15932dc, 'name': SearchDatastore_Task, 'duration_secs': 0.018926} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.135193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.135193] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.135193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.135193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.135193] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.135193] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cd78ed3-e90b-4539-9321-b7ca13a8bfe8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.143248] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.143441] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.144251] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76d0c3e6-449f-420c-bb24-95cf0f829490 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.151562] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 956.151562] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526041a2-797e-0eb8-8525-1c844c4624ca" [ 956.151562] env[68674]: _type = "Task" [ 956.151562] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.160697] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526041a2-797e-0eb8-8525-1c844c4624ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.334970] env[68674]: DEBUG nova.compute.manager [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 956.334970] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.335234] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401438b9-3f27-4d32-9d83-e08476afb045 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.349767] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.349767] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2adfa7fe-1511-4e73-8e3e-0b0cf5d90c52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.355592] env[68674]: DEBUG oslo_vmware.api [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 956.355592] env[68674]: value = "task-3240435" [ 956.355592] env[68674]: _type = "Task" [ 956.355592] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.370102] env[68674]: DEBUG oslo_vmware.api [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.491997] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e9bc5fc-c6ab-49dc-8b37-bafac92d2970 req-74c8cd27-669c-4726-b796-e8414e4f6a54 service nova] Releasing lock "refresh_cache-a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.524697] env[68674]: DEBUG nova.network.neutron [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Successfully created port: d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.544674] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad4a3ff-90bc-40c1-8606-9754930c5ac4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.554672] env[68674]: DEBUG nova.compute.manager [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.555081] env[68674]: DEBUG nova.compute.manager [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing instance network info cache due to event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 956.555471] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.555629] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.555798] env[68674]: DEBUG nova.network.neutron [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.557440] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135436} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.558351] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 956.559471] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dc1e59-3754-46f9-9e55-2a3b465757f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.566964] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2b7893-8e41-46dd-b98d-86ab5339acb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.589652] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] e894cd36-95c8-473b-9bbd-483f11fb5add/e894cd36-95c8-473b-9bbd-483f11fb5add.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 956.591638] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fc6f70c-d509-4815-a851-5b408ddf8260 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.637750] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1ad801-986d-40a1-a64c-7f2b05594aa2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.642660] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 956.642660] env[68674]: value = "task-3240436" [ 956.642660] env[68674]: _type = "Task" [ 956.642660] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.652437] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0434138d-7c7c-4776-93fe-c1892a384aed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.664501] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240436, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.674557] env[68674]: DEBUG nova.compute.provider_tree [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.676963] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.683450] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526041a2-797e-0eb8-8525-1c844c4624ca, 'name': SearchDatastore_Task, 'duration_secs': 0.014998} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.684376] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb331ee-c984-488e-91ce-24b820dc7608 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.690604] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 956.690604] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fd5a83-03f5-60d1-dc8e-eb357e3a8c47" [ 956.690604] env[68674]: _type = "Task" [ 956.690604] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.699433] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fd5a83-03f5-60d1-dc8e-eb357e3a8c47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.710143] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.710406] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.710567] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.710761] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.710939] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.711133] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.711394] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.711594] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.711792] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.712012] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.712175] env[68674]: DEBUG nova.virt.hardware [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.713123] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eefbaa6-4e33-4abf-81c7-7e0d5ee8657f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.721137] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d7d72a-a92b-4216-bf7f-c10e73be1bb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.871438] env[68674]: DEBUG oslo_vmware.api [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240435, 'name': PowerOffVM_Task, 'duration_secs': 0.326522} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.874857] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.874857] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.874857] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7aa62034-126d-496d-a7ac-61c1d6bcd141 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.941800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.942191] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.942422] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Deleting the datastore file [datastore1] 082fd3a5-b30e-41cc-8fba-dab2802a1e3e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.942777] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55eddd58-8e26-4aa1-97cc-90497c1326cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.949778] env[68674]: DEBUG oslo_vmware.api [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for the task: (returnval){ [ 956.949778] env[68674]: value = "task-3240438" [ 956.949778] env[68674]: _type = "Task" [ 956.949778] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.958061] env[68674]: DEBUG oslo_vmware.api [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240438, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.152275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-d4a0023c-9d88-48c5-9362-16dd8aca5a74" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.152813] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-d4a0023c-9d88-48c5-9362-16dd8aca5a74" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.153457] env[68674]: DEBUG nova.objects.instance [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'flavor' on Instance uuid 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.170348] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240436, 'name': ReconfigVM_Task, 'duration_secs': 0.268632} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.170914] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Reconfigured VM instance instance-00000054 to attach disk [datastore1] e894cd36-95c8-473b-9bbd-483f11fb5add/e894cd36-95c8-473b-9bbd-483f11fb5add.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.171876] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ee2be6a-93ea-4e99-b3fe-39f0665fdc82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.180535] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 957.180535] env[68674]: value = "task-3240439" [ 957.180535] env[68674]: _type = "Task" [ 957.180535] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.180535] env[68674]: DEBUG nova.scheduler.client.report [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.194126] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240439, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.203664] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fd5a83-03f5-60d1-dc8e-eb357e3a8c47, 'name': SearchDatastore_Task, 'duration_secs': 0.013515} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.203981] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.204425] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6/a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.204829] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-626e5950-a731-4a13-9b35-246d116b2957 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.211794] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 957.211794] env[68674]: value = "task-3240440" [ 957.211794] env[68674]: _type = "Task" [ 957.211794] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.223291] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240440, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.262851] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.263202] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.362162] env[68674]: DEBUG nova.network.neutron [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updated VIF entry in instance network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.362563] env[68674]: DEBUG nova.network.neutron [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.460651] env[68674]: DEBUG oslo_vmware.api [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Task: {'id': task-3240438, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358547} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.460945] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.461146] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.461324] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.461527] env[68674]: INFO nova.compute.manager [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 957.461740] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.462630] env[68674]: DEBUG nova.compute.manager [-] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 957.462630] env[68674]: DEBUG nova.network.neutron [-] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 957.691599] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.020s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.702019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.820s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.702019] env[68674]: DEBUG nova.objects.instance [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lazy-loading 'resources' on Instance uuid 245089a5-929d-49b0-aa36-749d342e8473 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.702019] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240439, 'name': Rename_Task, 'duration_secs': 0.167076} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.702019] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 957.702019] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd7b3e97-89f4-4571-bc45-cd0eb2a610b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.709337] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 957.709337] env[68674]: value = "task-3240441" [ 957.709337] env[68674]: _type = "Task" [ 957.709337] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.726314] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240441, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.728220] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240440, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.751596] env[68674]: INFO nova.scheduler.client.report [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Deleted allocations for instance 5e3f667c-5d3a-4465-9186-779563087480 [ 957.765312] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 957.865813] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.865871] env[68674]: DEBUG nova.compute.manager [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.866143] env[68674]: DEBUG nova.compute.manager [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing instance network info cache due to event network-changed-856fc34c-4049-4185-9ab1-8f86e2cfdeff. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 957.866334] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] Acquiring lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.866481] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] Acquired lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.866644] env[68674]: DEBUG nova.network.neutron [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Refreshing network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.921317] env[68674]: DEBUG nova.objects.instance [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'pci_requests' on Instance uuid 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.003456] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.004307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.236451] env[68674]: DEBUG oslo_vmware.api [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240441, 'name': PowerOnVM_Task, 'duration_secs': 0.490787} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.236783] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240440, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551883} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.240246] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 958.240538] env[68674]: INFO nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Took 10.35 seconds to spawn the instance on the hypervisor. [ 958.240782] env[68674]: DEBUG nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 958.241224] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6/a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.241423] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.242727] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be8db8c-b01d-4528-8407-a7348c296e9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.247102] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ba84778-f1fa-440d-8133-75ff91dec690 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.266362] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 958.266362] env[68674]: value = "task-3240442" [ 958.266362] env[68674]: _type = "Task" [ 958.266362] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.268423] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4810fe99-909b-4010-a12c-11633f318162 tempest-AttachInterfacesUnderV243Test-57951466 tempest-AttachInterfacesUnderV243Test-57951466-project-member] Lock "5e3f667c-5d3a-4465-9186-779563087480" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.519s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.282143] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240442, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.294960] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.316587] env[68674]: DEBUG nova.network.neutron [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Successfully updated port: d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.423476] env[68674]: DEBUG nova.objects.base [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Object Instance<0e7c5243-ad33-4391-8977-b9019643e3de> lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 958.423642] env[68674]: DEBUG nova.network.neutron [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 958.507890] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 958.533318] env[68674]: DEBUG nova.network.neutron [-] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.535519] env[68674]: DEBUG nova.policy [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 958.593423] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503ff486-2cbe-41f7-ab58-f0ba26598039 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.601127] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f702c0d3-0b85-4ad7-9e37-363add78ffe8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.635875] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd249ac-c31e-4c1d-9c86-587234cd897a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.643409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb7f8d5-66ff-41ee-9a2b-4a9626723df5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.657686] env[68674]: DEBUG nova.compute.provider_tree [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.777352] env[68674]: INFO nova.compute.manager [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Took 45.14 seconds to build instance. [ 958.785409] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.152066} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.785682] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.786527] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d97af5-5ae5-498f-b20f-1919d1830551 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.795761] env[68674]: DEBUG nova.network.neutron [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updated VIF entry in instance network info cache for port 856fc34c-4049-4185-9ab1-8f86e2cfdeff. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.796279] env[68674]: DEBUG nova.network.neutron [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [{"id": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "address": "fa:16:3e:28:1e:94", "network": {"id": "1674c138-dbec-4d03-b5b0-d1944ab38577", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-143958570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a84d9d6e23bd40049c34e6f087252b4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap856fc34c-40", "ovs_interfaceid": "856fc34c-4049-4185-9ab1-8f86e2cfdeff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.825758] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6/a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.825758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "refresh_cache-6af32e52-f10e-47be-ab36-e130614ba9e8" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.825758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-6af32e52-f10e-47be-ab36-e130614ba9e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.825758] env[68674]: DEBUG nova.network.neutron [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.825960] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59b8b1d2-1367-484d-8de4-05bc8b998d84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.851785] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 958.851785] env[68674]: value = "task-3240443" [ 958.851785] env[68674]: _type = "Task" [ 958.851785] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.860592] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240443, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.913120] env[68674]: DEBUG nova.network.neutron [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.036830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.039836] env[68674]: INFO nova.compute.manager [-] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Took 1.58 seconds to deallocate network for instance. 
[ 959.046403] env[68674]: DEBUG nova.compute.manager [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Received event network-vif-deleted-8ede585c-c152-4f0a-8740-215268e36b27 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 959.047102] env[68674]: DEBUG nova.compute.manager [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Received event network-vif-plugged-d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 959.047102] env[68674]: DEBUG oslo_concurrency.lockutils [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] Acquiring lock "6af32e52-f10e-47be-ab36-e130614ba9e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.047102] env[68674]: DEBUG oslo_concurrency.lockutils [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.047246] env[68674]: DEBUG oslo_concurrency.lockutils [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.047288] env[68674]: DEBUG nova.compute.manager [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] No waiting events found dispatching network-vif-plugged-d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 959.047445] env[68674]: WARNING nova.compute.manager [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Received unexpected event network-vif-plugged-d16f8563-f821-46f1-ae60-2096ac4a4486 for instance with vm_state building and task_state spawning. [ 959.047602] env[68674]: DEBUG nova.compute.manager [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Received event network-changed-d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 959.047798] env[68674]: DEBUG nova.compute.manager [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Refreshing instance network info cache due to event network-changed-d16f8563-f821-46f1-ae60-2096ac4a4486. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 959.047908] env[68674]: DEBUG oslo_concurrency.lockutils [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] Acquiring lock "refresh_cache-6af32e52-f10e-47be-ab36-e130614ba9e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.161549] env[68674]: DEBUG nova.scheduler.client.report [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.212144] env[68674]: DEBUG nova.network.neutron [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Updating instance_info_cache with network_info: [{"id": "d16f8563-f821-46f1-ae60-2096ac4a4486", "address": "fa:16:3e:b5:4c:a3", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd16f8563-f8", "ovs_interfaceid": "d16f8563-f821-46f1-ae60-2096ac4a4486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.280969] env[68674]: DEBUG oslo_concurrency.lockutils [None req-85b3c08d-9b91-4018-a31e-3da4565b9354 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.648s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.299997] env[68674]: DEBUG oslo_concurrency.lockutils [req-5b6c60c2-67c3-4367-bb1f-7d7705ecdd01 req-b4411f47-0857-4be0-83a7-865b2339ba9c service nova] Releasing lock "refresh_cache-7d953e59-53c1-4041-a641-35c12c012f7e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.363640] env[68674]: DEBUG 
oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240443, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.549900] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.667417] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.670153] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.398s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.672257] env[68674]: INFO nova.compute.claims [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.699251] env[68674]: INFO nova.scheduler.client.report [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Deleted allocations for instance 245089a5-929d-49b0-aa36-749d342e8473 [ 959.713072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-6af32e52-f10e-47be-ab36-e130614ba9e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.713072] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Instance network_info: |[{"id": "d16f8563-f821-46f1-ae60-2096ac4a4486", "address": "fa:16:3e:b5:4c:a3", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd16f8563-f8", "ovs_interfaceid": "d16f8563-f821-46f1-ae60-2096ac4a4486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 959.713294] env[68674]: DEBUG oslo_concurrency.lockutils [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] Acquired lock "refresh_cache-6af32e52-f10e-47be-ab36-e130614ba9e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.713477] env[68674]: DEBUG nova.network.neutron [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Refreshing network info cache for port d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.714652] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:4c:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd16f8563-f821-46f1-ae60-2096ac4a4486', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.722290] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.725696] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.726231] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6454b747-725d-460d-b483-fa70bcde7fb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.749342] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.749342] env[68674]: value = "task-3240444" [ 959.749342] env[68674]: _type = "Task" [ 959.749342] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.757912] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240444, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.864025] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240443, 'name': ReconfigVM_Task, 'duration_secs': 0.757544} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.864481] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Reconfigured VM instance instance-00000055 to attach disk [datastore2] a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6/a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.865141] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec5e57f2-8b4a-499a-9652-2fc7a5035b8f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.873568] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 959.873568] env[68674]: value = "task-3240445" [ 959.873568] env[68674]: _type = "Task" [ 959.873568] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.881404] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240445, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.001468] env[68674]: DEBUG nova.network.neutron [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Updated VIF entry in instance network info cache for port d16f8563-f821-46f1-ae60-2096ac4a4486. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.001560] env[68674]: DEBUG nova.network.neutron [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Updating instance_info_cache with network_info: [{"id": "d16f8563-f821-46f1-ae60-2096ac4a4486", "address": "fa:16:3e:b5:4c:a3", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd16f8563-f8", "ovs_interfaceid": "d16f8563-f821-46f1-ae60-2096ac4a4486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.206087] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2e183223-3927-4add-a8a0-cc2f54f09e0a tempest-ServersListShow298Test-2008292874 tempest-ServersListShow298Test-2008292874-project-member] Lock "245089a5-929d-49b0-aa36-749d342e8473" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.684s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.261358] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240444, 'name': CreateVM_Task, 'duration_secs': 0.377766} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.261641] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.262476] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.262622] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.263038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 960.263365] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9d1343e-8e7d-4812-a3f9-2d3167a3aab7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.267884] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 960.267884] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521383b6-4f58-9b84-9da4-05bdf87efc64" [ 960.267884] env[68674]: _type = "Task" [ 960.267884] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.275824] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521383b6-4f58-9b84-9da4-05bdf87efc64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.383632] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240445, 'name': Rename_Task, 'duration_secs': 0.231912} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.383963] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.384304] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16462d9e-3f78-4fc9-be91-c9ca6660b2be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.391092] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 960.391092] env[68674]: value = "task-3240446" [ 960.391092] env[68674]: _type = "Task" [ 960.391092] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.398769] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.432405] env[68674]: DEBUG nova.network.neutron [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Successfully updated port: d4a0023c-9d88-48c5-9362-16dd8aca5a74 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 960.504732] env[68674]: DEBUG oslo_concurrency.lockutils [req-41786a43-94f1-4b55-b280-2d15e4ce73c5 req-47361691-cb24-4126-baba-c7b7d46fa038 service nova] Releasing lock "refresh_cache-6af32e52-f10e-47be-ab36-e130614ba9e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.781156] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521383b6-4f58-9b84-9da4-05bdf87efc64, 'name': SearchDatastore_Task, 'duration_secs': 0.013423} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.782027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.782027] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.782027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.782027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.782250] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.782420] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f75dfc6b-00a7-45fc-a1d3-02aae870b198 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.793531] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.793598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.794287] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1b078a9-ffa6-4843-b06f-08aa419607d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.802519] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 960.802519] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d16c55-8f14-d188-0e47-11c3a8e4df16" [ 960.802519] env[68674]: _type = "Task" [ 960.802519] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.810660] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d16c55-8f14-d188-0e47-11c3a8e4df16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.905606] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240446, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.935259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.935448] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.935851] env[68674]: DEBUG nova.network.neutron [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.046747] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec19520-27d5-469e-ad42-de473b38ce97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.060111] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb18443c-2df6-47e1-891c-1e826d7d4bda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.094373] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4815584f-b92e-4f94-bcc3-40865783f355 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.103069] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805c4ad6-1c3d-41a6-9cbb-5f59e49e1cbe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.117559] env[68674]: DEBUG nova.compute.provider_tree [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.223228] env[68674]: DEBUG nova.compute.manager [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-vif-plugged-d4a0023c-9d88-48c5-9362-16dd8aca5a74 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.223478] env[68674]: DEBUG oslo_concurrency.lockutils [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.223654] env[68674]: DEBUG oslo_concurrency.lockutils [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.223823] env[68674]: DEBUG oslo_concurrency.lockutils [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.223991] env[68674]: DEBUG nova.compute.manager [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] No waiting events found dispatching network-vif-plugged-d4a0023c-9d88-48c5-9362-16dd8aca5a74 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 961.224680] env[68674]: WARNING nova.compute.manager [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received unexpected event network-vif-plugged-d4a0023c-9d88-48c5-9362-16dd8aca5a74 for instance with vm_state active and task_state None. 
[ 961.225053] env[68674]: DEBUG nova.compute.manager [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-changed-d4a0023c-9d88-48c5-9362-16dd8aca5a74 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.225248] env[68674]: DEBUG nova.compute.manager [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing instance network info cache due to event network-changed-d4a0023c-9d88-48c5-9362-16dd8aca5a74. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 961.225426] env[68674]: DEBUG oslo_concurrency.lockutils [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.313486] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d16c55-8f14-d188-0e47-11c3a8e4df16, 'name': SearchDatastore_Task, 'duration_secs': 0.009671} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.314591] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e4be335-098c-42af-bd8a-308b9a2f1989 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.320574] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 961.320574] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f47fa-e5cf-3be0-c78e-8191ae4bee98" [ 961.320574] env[68674]: _type = "Task" [ 961.320574] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.333022] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f47fa-e5cf-3be0-c78e-8191ae4bee98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.403654] env[68674]: DEBUG oslo_vmware.api [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240446, 'name': PowerOnVM_Task, 'duration_secs': 0.580185} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.403994] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.404046] env[68674]: INFO nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Took 9.12 seconds to spawn the instance on the hypervisor. [ 961.404207] env[68674]: DEBUG nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.404973] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01369e53-f203-4672-8e01-5c57d44ce2ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.480441] env[68674]: WARNING nova.network.neutron [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] a803f1d7-ea36-4d0a-9a85-9b7a8d27f698 already exists in list: networks containing: ['a803f1d7-ea36-4d0a-9a85-9b7a8d27f698']. ignoring it [ 961.480775] env[68674]: WARNING nova.network.neutron [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] a803f1d7-ea36-4d0a-9a85-9b7a8d27f698 already exists in list: networks containing: ['a803f1d7-ea36-4d0a-9a85-9b7a8d27f698']. ignoring it [ 961.621229] env[68674]: DEBUG nova.scheduler.client.report [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.832096] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f47fa-e5cf-3be0-c78e-8191ae4bee98, 'name': SearchDatastore_Task, 'duration_secs': 0.023438} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.832096] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.832096] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 6af32e52-f10e-47be-ab36-e130614ba9e8/6af32e52-f10e-47be-ab36-e130614ba9e8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.832640] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddeac02f-efd5-4f85-a71f-4c717715a6de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.839645] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 961.839645] env[68674]: value = "task-3240447" [ 961.839645] env[68674]: _type = "Task" [ 961.839645] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.847816] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240447, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.924698] env[68674]: INFO nova.compute.manager [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Took 45.67 seconds to build instance. [ 962.125696] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.126385] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 962.129334] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.511s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.129895] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.132375] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.157s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.168807] env[68674]: INFO nova.scheduler.client.report [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleted allocations for instance 7aa58e2f-1202-4252-9c38-ce53084c573f [ 962.235950] env[68674]: DEBUG nova.network.neutron [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "861f9feb-a46d-4b29-851a-f958bd80ef86", "address": "fa:16:3e:a7:8f:46", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861f9feb-a4", "ovs_interfaceid": "861f9feb-a46d-4b29-851a-f958bd80ef86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d4a0023c-9d88-48c5-9362-16dd8aca5a74", "address": "fa:16:3e:93:1a:51", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a0023c-9d", "ovs_interfaceid": "d4a0023c-9d88-48c5-9362-16dd8aca5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.352701] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240447, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482033} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.353119] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 6af32e52-f10e-47be-ab36-e130614ba9e8/6af32e52-f10e-47be-ab36-e130614ba9e8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.353817] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.353817] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c8fee0f-736b-45ca-bc96-7185230cd389 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.360379] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 962.360379] env[68674]: value = "task-3240448" [ 962.360379] env[68674]: _type = "Task" [ 962.360379] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.368259] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240448, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.427499] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6d250fcd-0605-415d-9422-b4e01ac9c493 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.179s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.636832] env[68674]: DEBUG nova.compute.utils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 962.638415] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 962.638506] env[68674]: DEBUG nova.network.neutron [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 962.642547] env[68674]: INFO nova.compute.claims [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.677597] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dbe27768-9a9c-4447-b176-0bd43325f053 tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "7aa58e2f-1202-4252-9c38-ce53084c573f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.225s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.720210] env[68674]: DEBUG nova.policy [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b5a988ac9d24d1492c499e210f61be7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fbaef606f1948db867cd3a0c5ff3692', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 962.739164] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.740282] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.740282] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.740282] env[68674]: DEBUG oslo_concurrency.lockutils [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.740461] env[68674]: DEBUG nova.network.neutron [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 
service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Refreshing network info cache for port d4a0023c-9d88-48c5-9362-16dd8aca5a74 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.742089] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77a60e3-cb2a-4069-970a-ee276a281293 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.762020] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 962.762273] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.762456] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 962.763307] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.763307] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 962.763307] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 962.763307] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 962.763583] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 
tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 962.763583] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 962.763701] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 962.763869] env[68674]: DEBUG nova.virt.hardware [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 962.770197] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfiguring VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 962.770783] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-475b3d9c-6fd7-46c7-b3db-fc1c5b268d9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.788038] env[68674]: DEBUG oslo_vmware.api [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 962.788038] env[68674]: value = "task-3240449" [ 962.788038] env[68674]: _type = "Task" [ 962.788038] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.795825] env[68674]: DEBUG oslo_vmware.api [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240449, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.871324] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167026} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.871691] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.872559] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4396b3fc-a159-4f7e-b399-f34a378be4cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.897226] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 6af32e52-f10e-47be-ab36-e130614ba9e8/6af32e52-f10e-47be-ab36-e130614ba9e8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.897614] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-857b5cb0-4697-4942-bf92-00a578adb834 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.918462] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 962.918462] env[68674]: value = "task-3240450" [ 962.918462] env[68674]: _type = "Task" [ 962.918462] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.928405] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240450, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.148063] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 963.152224] env[68674]: INFO nova.compute.resource_tracker [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating resource usage from migration 8ebdc340-6af1-4ea5-99e1-3cf57304ed57 [ 963.300493] env[68674]: DEBUG oslo_vmware.api [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240449, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.366469] env[68674]: DEBUG nova.network.neutron [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Successfully created port: 452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.430670] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240450, 'name': ReconfigVM_Task, 'duration_secs': 0.295207} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.433127] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 6af32e52-f10e-47be-ab36-e130614ba9e8/6af32e52-f10e-47be-ab36-e130614ba9e8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.435037] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c24192e4-634e-42c2-b132-2b78f69ccc33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.441430] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 963.441430] env[68674]: value = "task-3240451" [ 963.441430] env[68674]: _type = "Task" [ 963.441430] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.452951] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240451, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.469377] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e945366e-160c-4400-8aa3-1736eec7e5d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.479946] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9c0088-be09-4535-8be0-c600ee77af1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.515042] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c7278e-25ad-45bf-ae0f-80ef7b68425f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.524827] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871f1abc-11ae-46b1-b4bd-bae41694563a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.542367] env[68674]: DEBUG nova.compute.provider_tree [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.713160] env[68674]: DEBUG nova.network.neutron [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updated VIF entry in instance network info cache for port d4a0023c-9d88-48c5-9362-16dd8aca5a74. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.714016] env[68674]: DEBUG nova.network.neutron [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "861f9feb-a46d-4b29-851a-f958bd80ef86", "address": "fa:16:3e:a7:8f:46", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861f9feb-a4", "ovs_interfaceid": "861f9feb-a46d-4b29-851a-f958bd80ef86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d4a0023c-9d88-48c5-9362-16dd8aca5a74", "address": "fa:16:3e:93:1a:51", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a0023c-9d", "ovs_interfaceid": "d4a0023c-9d88-48c5-9362-16dd8aca5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.803808] env[68674]: DEBUG oslo_vmware.api [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.954566] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240451, 'name': Rename_Task, 'duration_secs': 0.147275} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.954866] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.955140] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22678f1a-3e62-4b8f-b72e-6aaddd9b59ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.961500] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 963.961500] env[68674]: value = "task-3240452" [ 963.961500] env[68674]: _type = "Task" [ 963.961500] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.971509] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240452, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.048807] env[68674]: DEBUG nova.scheduler.client.report [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.102447] env[68674]: DEBUG nova.compute.manager [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.103455] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3ff823-f3b1-4d48-9765-dbae23440000 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.164031] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 964.196374] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 964.196927] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.197416] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 964.197760] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.198075] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 964.198539] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 964.198899] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 964.199232] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 964.199568] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 964.200030] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 964.200591] env[68674]: DEBUG nova.virt.hardware [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 964.202017] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6d3b5b-cde7-4545-a8d7-dfdf97ab2452 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.210825] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912b1718-4b2e-4797-a319-2abf6a3c64df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.218431] env[68674]: DEBUG oslo_concurrency.lockutils [req-aa3bd841-da45-4b7f-af81-1a368a11c8e1 req-5d2501df-7968-4ceb-8aad-b17b47583804 service nova] Releasing lock 
"refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.303618] env[68674]: DEBUG oslo_vmware.api [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240449, 'name': ReconfigVM_Task, 'duration_secs': 1.109241} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.304366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.304768] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfigured VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 964.474300] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240452, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.554556] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.422s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.554994] env[68674]: INFO nova.compute.manager [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Migrating [ 964.569573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.969s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.569851] env[68674]: DEBUG nova.objects.instance [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lazy-loading 'resources' on Instance uuid e684ec31-b5d9-458c-bbba-36ada7f275bd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.617142] env[68674]: INFO nova.compute.manager [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] instance snapshotting [ 964.620211] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a189025f-6824-40b0-b32a-4e3e70cca639 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.648930] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0394da5c-69f3-4eee-b75a-8f2f0b291d06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.811095] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b07ff8f1-1171-493e-b047-f42479da7cba tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-d4a0023c-9d88-48c5-9362-16dd8aca5a74" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.658s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.835348] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "77fa5a89-961b-4c84-a75e-a5be0253677e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.835554] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.835779] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.835972] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.836177] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.838194] env[68674]: INFO nova.compute.manager [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Terminating instance [ 964.975089] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240452, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.084644] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.084848] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.085053] env[68674]: DEBUG nova.network.neutron [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.137865] env[68674]: DEBUG nova.network.neutron [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Successfully updated port: 452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.158940] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 965.160289] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e7d23465-bda2-434d-96b7-698f9b661ce5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.169337] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 965.169337] env[68674]: value = "task-3240453" [ 965.169337] env[68674]: _type = "Task" [ 965.169337] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.179924] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240453, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.341578] env[68674]: DEBUG nova.compute.manager [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.341816] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.342662] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587804dd-8ed9-434d-a29f-d2ed900631b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.351106] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.351227] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-271729ad-e2f9-411d-90e8-97c821cb7935 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.357073] env[68674]: DEBUG oslo_vmware.api [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 965.357073] env[68674]: value = "task-3240454" [ 965.357073] env[68674]: _type = "Task" [ 965.357073] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.358272] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e43791-837a-45fb-9ee3-0eba058427fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.371123] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7194e209-752c-4787-8e51-658b15711925 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.374394] env[68674]: DEBUG oslo_vmware.api [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240454, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.403300] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c588f0cd-48ed-4f9b-8a5e-9fb073586b3c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.406521] env[68674]: DEBUG nova.compute.manager [req-3fc59ae6-f99c-4772-b9be-ccc133d4b918 req-062e3c31-783d-4906-b02c-9040cd81a59b service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Received event network-vif-plugged-452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.406733] env[68674]: DEBUG oslo_concurrency.lockutils [req-3fc59ae6-f99c-4772-b9be-ccc133d4b918 req-062e3c31-783d-4906-b02c-9040cd81a59b service nova] Acquiring lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.406938] env[68674]: DEBUG oslo_concurrency.lockutils [req-3fc59ae6-f99c-4772-b9be-ccc133d4b918 req-062e3c31-783d-4906-b02c-9040cd81a59b service nova] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.407113] env[68674]: DEBUG oslo_concurrency.lockutils [req-3fc59ae6-f99c-4772-b9be-ccc133d4b918 req-062e3c31-783d-4906-b02c-9040cd81a59b service nova] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.407282] env[68674]: DEBUG nova.compute.manager [req-3fc59ae6-f99c-4772-b9be-ccc133d4b918 req-062e3c31-783d-4906-b02c-9040cd81a59b service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] No waiting events found dispatching network-vif-plugged-452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 965.407445] env[68674]: WARNING nova.compute.manager [req-3fc59ae6-f99c-4772-b9be-ccc133d4b918 req-062e3c31-783d-4906-b02c-9040cd81a59b service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Received unexpected event network-vif-plugged-452f8db5-332d-4019-ac05-2ad8f6360a0b for instance with vm_state building and task_state spawning. 
[ 965.413822] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d2f0b7-f0ca-4ede-85a6-90d0b92440d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.429932] env[68674]: DEBUG nova.compute.provider_tree [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.473493] env[68674]: DEBUG oslo_vmware.api [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240452, 'name': PowerOnVM_Task, 'duration_secs': 1.075731} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.473493] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.473686] env[68674]: INFO nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Took 8.80 seconds to spawn the instance on the hypervisor. [ 965.473880] env[68674]: DEBUG nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 965.474766] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40f0517-d8c1-4af5-b590-fd1d3429b8b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.640815] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "refresh_cache-691f9f14-4f53-46a4-8bf7-d027cfdd37e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.642211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired lock "refresh_cache-691f9f14-4f53-46a4-8bf7-d027cfdd37e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.642211] env[68674]: DEBUG nova.network.neutron [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.688647] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 
tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240453, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.859859] env[68674]: DEBUG nova.network.neutron [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance_info_cache with network_info: [{"id": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "address": "fa:16:3e:92:28:03", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88af4dfc-59", "ovs_interfaceid": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.870780] env[68674]: DEBUG oslo_vmware.api [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240454, 'name': PowerOffVM_Task, 'duration_secs': 0.235819} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.871594] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.871776] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.872047] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb5c516a-46ea-4240-aaba-e350cc16e173 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.933967] env[68674]: DEBUG nova.scheduler.client.report [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 965.948416] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.948575] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.949163] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleting the datastore file [datastore1] 77fa5a89-961b-4c84-a75e-a5be0253677e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.949163] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0328fc0d-f7f8-4c94-b7e3-64430e85cb9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.955649] env[68674]: DEBUG oslo_vmware.api [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for the task: (returnval){ [ 965.955649] env[68674]: value = "task-3240456" [ 965.955649] env[68674]: _type = "Task" [ 965.955649] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.963905] env[68674]: DEBUG oslo_vmware.api [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.996129] env[68674]: INFO nova.compute.manager [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Took 42.23 seconds to build instance. [ 966.184937] env[68674]: DEBUG nova.network.neutron [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.187450] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240453, 'name': CreateSnapshot_Task, 'duration_secs': 0.983879} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.188077] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 966.189453] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ef0ba4-2193-4543-b596-11ecf459ae97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.365765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.425463] env[68674]: DEBUG nova.network.neutron [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Updating instance_info_cache with network_info: [{"id": "452f8db5-332d-4019-ac05-2ad8f6360a0b", "address": "fa:16:3e:bc:b7:c9", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap452f8db5-33", "ovs_interfaceid": "452f8db5-332d-4019-ac05-2ad8f6360a0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.441032] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.443746] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.039s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.444013] env[68674]: DEBUG nova.objects.instance [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'pci_requests' on Instance uuid 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.468688] env[68674]: DEBUG oslo_vmware.api [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Task: {'id': task-3240456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238309} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.468688] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.468688] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.468688] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.468688] env[68674]: INFO nova.compute.manager [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 966.468688] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.468905] env[68674]: INFO nova.scheduler.client.report [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Deleted allocations for instance e684ec31-b5d9-458c-bbba-36ada7f275bd [ 966.470005] env[68674]: DEBUG nova.compute.manager [-] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.470005] env[68674]: DEBUG nova.network.neutron [-] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.498705] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c655aa87-86cb-4203-9581-f8b37720a64f tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.739s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.689580] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "e894cd36-95c8-473b-9bbd-483f11fb5add" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.692187] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.692677] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "e894cd36-95c8-473b-9bbd-483f11fb5add-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.692973] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.693648] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 
tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.699376] env[68674]: INFO nova.compute.manager [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Terminating instance [ 966.712943] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 966.714759] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9019c0b4-046b-485c-a958-21ccad2b9af0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.725136] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 966.725136] env[68674]: value = "task-3240457" [ 966.725136] env[68674]: _type = "Task" [ 966.725136] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.735927] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240457, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.922246] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-861f9feb-a46d-4b29-851a-f958bd80ef86" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.922246] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-861f9feb-a46d-4b29-851a-f958bd80ef86" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.931022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Releasing lock "refresh_cache-691f9f14-4f53-46a4-8bf7-d027cfdd37e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.931022] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Instance network_info: |[{"id": "452f8db5-332d-4019-ac05-2ad8f6360a0b", "address": "fa:16:3e:bc:b7:c9", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap452f8db5-33", "ovs_interfaceid": "452f8db5-332d-4019-ac05-2ad8f6360a0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 966.931333] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:b7:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '452f8db5-332d-4019-ac05-2ad8f6360a0b', 
'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.942408] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.946022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 966.946022] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1225225e-2e99-448f-b6ee-702804c9056c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.961236] env[68674]: DEBUG nova.objects.instance [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'numa_topology' on Instance uuid 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.968754] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 966.968754] env[68674]: value = "task-3240458" [ 966.968754] env[68674]: _type = "Task" [ 966.968754] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.980305] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240458, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.986027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-56b0d8fa-da46-4c8a-a377-990178beedcc tempest-ImagesTestJSON-633451892 tempest-ImagesTestJSON-633451892-project-member] Lock "e684ec31-b5d9-458c-bbba-36ada7f275bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.854s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.220776] env[68674]: DEBUG nova.compute.manager [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 967.221081] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.222061] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1189ff-9584-4214-a89d-1d3d43fb17d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.232108] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.232860] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b00209a-ae55-4401-99de-9abc58712205 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.239474] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240457, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.244986] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 967.244986] env[68674]: value = "task-3240459" [ 967.244986] env[68674]: _type = "Task" [ 967.244986] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.251784] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240459, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.340962] env[68674]: DEBUG nova.network.neutron [-] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.401059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "6af32e52-f10e-47be-ab36-e130614ba9e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.401434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.401654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "6af32e52-f10e-47be-ab36-e130614ba9e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.402135] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.402135] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.406330] env[68674]: INFO nova.compute.manager [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Terminating instance [ 967.428614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.428828] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.431978] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9166f7-7fd0-4cba-87f7-e93befc80191 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.455145] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5ce0ea-5063-4f08-8b14-d48e3222550a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.480017] env[68674]: INFO nova.compute.claims [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.491596] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfiguring VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 967.496774] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2360e8c4-e49d-4790-b1e4-408ed0ffa4b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.518733] env[68674]: DEBUG nova.compute.manager [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Received event network-changed-452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.519042] env[68674]: DEBUG nova.compute.manager [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Refreshing instance network info cache due to event network-changed-452f8db5-332d-4019-ac05-2ad8f6360a0b. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 967.519154] env[68674]: DEBUG oslo_concurrency.lockutils [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] Acquiring lock "refresh_cache-691f9f14-4f53-46a4-8bf7-d027cfdd37e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.519281] env[68674]: DEBUG oslo_concurrency.lockutils [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] Acquired lock "refresh_cache-691f9f14-4f53-46a4-8bf7-d027cfdd37e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.519482] env[68674]: DEBUG nova.network.neutron [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Refreshing network info cache for port 452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 967.527947] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240458, 'name': CreateVM_Task, 'duration_secs': 0.52227} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.529278] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 967.529684] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 967.529684] env[68674]: value = "task-3240460" [ 967.529684] env[68674]: _type = "Task" [ 967.529684] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.530956] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.530956] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.530956] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 967.531199] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50926604-15d0-4d04-88cd-d19ebdb30b40 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.541039] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 967.541039] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a6bda-6f00-3d3e-3c6c-298055f4cada" [ 967.541039] env[68674]: _type = "Task" [ 967.541039] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.544726] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.555239] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520a6bda-6f00-3d3e-3c6c-298055f4cada, 'name': SearchDatastore_Task, 'duration_secs': 0.011274} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.555834] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.555834] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 967.556388] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.556388] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.556388] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 967.556677] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-718a508d-005c-47e0-b0d1-3b866be78be1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.566833] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 967.566833] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 967.567498] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f735f50-1a94-4e71-9143-f655c8efa7f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.576518] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 967.576518] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5244f61b-8e7a-44f4-c49f-2ad573c6797f" [ 967.576518] env[68674]: _type = "Task" [ 967.576518] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.586726] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5244f61b-8e7a-44f4-c49f-2ad573c6797f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.737702] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240457, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.754559] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240459, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.844767] env[68674]: INFO nova.compute.manager [-] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Took 1.37 seconds to deallocate network for instance. [ 967.883670] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7e0b55-d9d4-4d84-822c-a6a50e4b59dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.902800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 967.915430] env[68674]: DEBUG nova.compute.manager [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 967.915661] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.916514] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0508da8c-6a4b-4676-9cbf-08605236af5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.924488] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.925337] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87624d01-ffc8-458a-aca1-cfcf3084e132 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.931515] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 967.931515] env[68674]: value = "task-3240461" [ 967.931515] env[68674]: _type = "Task" [ 967.931515] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.940073] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.042904] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.086760] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5244f61b-8e7a-44f4-c49f-2ad573c6797f, 'name': SearchDatastore_Task, 'duration_secs': 0.010616} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.087841] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f278cfa-d4b7-4d26-8770-4305453d1b87 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.092697] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 968.092697] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528cc72e-0d53-55dd-a8e1-dc3304528048" [ 968.092697] env[68674]: _type = "Task" [ 968.092697] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.101856] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528cc72e-0d53-55dd-a8e1-dc3304528048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.175611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.176503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.242551] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240457, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.258771] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240459, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.319592] env[68674]: DEBUG nova.network.neutron [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Updated VIF entry in instance network info cache for port 452f8db5-332d-4019-ac05-2ad8f6360a0b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 968.320015] env[68674]: DEBUG nova.network.neutron [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Updating instance_info_cache with network_info: [{"id": "452f8db5-332d-4019-ac05-2ad8f6360a0b", "address": "fa:16:3e:bc:b7:c9", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap452f8db5-33", "ovs_interfaceid": "452f8db5-332d-4019-ac05-2ad8f6360a0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.350938] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.410688] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.411116] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2348026b-4b0e-4a84-ae9a-19701f1fbcf3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.419873] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 968.419873] env[68674]: value = "task-3240462" [ 968.419873] env[68674]: _type = "Task" [ 968.419873] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.429086] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240462, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.440134] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.542457] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.606318] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528cc72e-0d53-55dd-a8e1-dc3304528048, 'name': SearchDatastore_Task, 'duration_secs': 0.014898} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.606599] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.606858] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 691f9f14-4f53-46a4-8bf7-d027cfdd37e8/691f9f14-4f53-46a4-8bf7-d027cfdd37e8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 968.607139] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-057ddc30-449d-4705-b7f8-2a5eeccfa401 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.613815] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 968.613815] env[68674]: value = "task-3240463" [ 968.613815] env[68674]: _type = "Task" [ 968.613815] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.627242] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240463, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.682278] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 968.743264] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240457, 'name': CloneVM_Task, 'duration_secs': 1.817287} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.744221] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Created linked-clone VM from snapshot [ 968.744985] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72471d0-9400-4db3-9a53-445c80c37f22 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.758928] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Uploading image fb80fca4-b398-4718-91d5-03a169cdace7 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 968.765427] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240459, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.797421] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 968.797421] env[68674]: value = "vm-647624" [ 968.797421] env[68674]: _type = "VirtualMachine" [ 968.797421] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 968.797714] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b7c06c8a-f1ea-48a9-b3e0-9eb74adcf574 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.807808] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lease: (returnval){ [ 968.807808] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5246bd32-3890-4c0f-596e-7561b4db7b30" [ 968.807808] env[68674]: _type = "HttpNfcLease" [ 968.807808] env[68674]: } obtained for exporting VM: (result){ [ 968.807808] env[68674]: value = "vm-647624" [ 968.807808] env[68674]: _type = "VirtualMachine" [ 968.807808] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 968.808080] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the lease: (returnval){ [ 968.808080] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5246bd32-3890-4c0f-596e-7561b4db7b30" [ 968.808080] env[68674]: _type = "HttpNfcLease" [ 968.808080] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 968.814648] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 968.814648] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5246bd32-3890-4c0f-596e-7561b4db7b30" [ 968.814648] env[68674]: _type = "HttpNfcLease" [ 968.814648] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 968.822979] env[68674]: DEBUG oslo_concurrency.lockutils [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] Releasing lock "refresh_cache-691f9f14-4f53-46a4-8bf7-d027cfdd37e8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.823348] env[68674]: DEBUG nova.compute.manager [req-f1889d29-1493-4a75-9845-c3075cc4b916 req-3b1d9537-4234-4b9e-8e39-ddaaf08da656 service nova] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Received event network-vif-deleted-b1a3099e-550f-4bc4-a4b5-1fe1e04ea342 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.858608] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73919146-4f9e-4b86-89e6-1c6863f07133 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.866773] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d084b7-c4a0-4d78-9417-7e3c2d0a4069 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.901939] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ea0f61-06d7-4e82-a868-fd625538f308 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.910189] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f71472-5c7a-4dc5-9fd1-15ad33699a32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.926840] env[68674]: DEBUG nova.compute.provider_tree [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.948750] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240461, 'name': PowerOffVM_Task, 'duration_secs': 0.748605} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.949009] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240462, 'name': PowerOffVM_Task, 'duration_secs': 0.261038} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.949271] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.949444] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.949703] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.949880] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 968.953739] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab131770-5336-440b-868f-f9144c8ecce5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.970885] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "7a13c52a-328a-4baa-827f-4f2e9cd29269" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.970885] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.030921] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
969.030921] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.031168] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore2] 6af32e52-f10e-47be-ab36-e130614ba9e8 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.032492] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e577daec-a3c3-4368-a8e2-5c09f7fe0112 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.043061] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 969.043061] env[68674]: value = "task-3240466" [ 969.043061] env[68674]: _type = "Task" [ 969.043061] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.047043] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.055580] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.123355] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506477} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.123633] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 691f9f14-4f53-46a4-8bf7-d027cfdd37e8/691f9f14-4f53-46a4-8bf7-d027cfdd37e8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.123959] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.124119] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-441af877-6854-4541-9b54-b9123da48059 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.131126] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 969.131126] env[68674]: value = "task-3240467" [ 969.131126] env[68674]: _type = "Task" [ 969.131126] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.141362] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240467, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.212363] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.263324] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240459, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.316716] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 969.316716] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5246bd32-3890-4c0f-596e-7561b4db7b30" [ 969.316716] env[68674]: _type = "HttpNfcLease" [ 969.316716] env[68674]: } is ready. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 969.316716] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 969.316716] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5246bd32-3890-4c0f-596e-7561b4db7b30" [ 969.316716] env[68674]: _type = "HttpNfcLease" [ 969.316716] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 969.317282] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6cd566-6b8f-4358-be32-bb27ea00e1d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.324675] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520d21c7-e2d9-8c3d-717a-dbd89cdd9f8a/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 969.324871] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520d21c7-e2d9-8c3d-717a-dbd89cdd9f8a/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 969.432578] env[68674]: DEBUG nova.scheduler.client.report [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.448034] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8824dddb-a914-475d-b2ec-3b3951a52c5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.459422] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.459422] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.459422] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.459422] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.459422] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.459882] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.459882] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.459882] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.460046] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.460213] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.460376] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.467986] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfbd9a7d-b1d2-4a68-94e4-1625196fc056 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.480806] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.488039] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 969.488039] env[68674]: value = "task-3240468" [ 969.488039] env[68674]: _type = "Task" [ 969.488039] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.502447] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240468, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.546770] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.555194] env[68674]: DEBUG oslo_vmware.api [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202246} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.555478] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.555800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.555855] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.556060] env[68674]: INFO nova.compute.manager [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Took 1.64 seconds to destroy the instance on the hypervisor. [ 969.556317] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.556505] env[68674]: DEBUG nova.compute.manager [-] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.556814] env[68674]: DEBUG nova.network.neutron [-] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.642192] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069698} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.642528] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.643674] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e32223d-620c-4397-a271-3404bbfc933e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.671133] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 691f9f14-4f53-46a4-8bf7-d027cfdd37e8/691f9f14-4f53-46a4-8bf7-d027cfdd37e8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.671835] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b615fbd-452c-43ab-bca4-eb75799978a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.694034] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 969.694034] env[68674]: value = "task-3240469" [ 969.694034] env[68674]: _type = "Task" [ 969.694034] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.701597] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240469, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.758966] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240459, 'name': PowerOffVM_Task, 'duration_secs': 2.082616} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.759481] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.759780] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.760159] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ea674e3-27be-48f5-88fe-8d74ab0d614d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.832364] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.832623] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.835015] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Deleting the datastore file [datastore1] e894cd36-95c8-473b-9bbd-483f11fb5add {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.835015] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4e429cb-0725-4ae1-b27e-fb9fecc053c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.841482] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for the task: (returnval){ [ 969.841482] env[68674]: value = "task-3240471" [ 969.841482] env[68674]: _type = "Task" [ 969.841482] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.848567] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240471, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.938719] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.496s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.941216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.203s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.941444] env[68674]: DEBUG nova.objects.instance [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lazy-loading 'resources' on Instance uuid 33313b29-abaf-4ff7-9182-abfcfb9b3220 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.979184] env[68674]: DEBUG nova.compute.manager [req-8e9b8db5-f553-4f1b-b580-28d73097526e req-7e2945fc-3d7b-4e1e-8cc7-9698d2a6ba4c service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Received event network-vif-deleted-d16f8563-f821-46f1-ae60-2096ac4a4486 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.979539] env[68674]: INFO nova.compute.manager [req-8e9b8db5-f553-4f1b-b580-28d73097526e req-7e2945fc-3d7b-4e1e-8cc7-9698d2a6ba4c service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Neutron deleted interface d16f8563-f821-46f1-ae60-2096ac4a4486; detaching it from the instance and deleting it from the info cache [ 969.980232] env[68674]: DEBUG nova.network.neutron [req-8e9b8db5-f553-4f1b-b580-28d73097526e req-7e2945fc-3d7b-4e1e-8cc7-9698d2a6ba4c service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.981793] env[68674]: INFO nova.network.neutron [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating port 2cf52206-a0c1-4b57-886d-23df69181f20 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 970.009901] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240468, 'name': ReconfigVM_Task, 'duration_secs': 0.233509} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.012426] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.012765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.014651] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 970.020160] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.047716] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.206718] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240469, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.349611] env[68674]: DEBUG oslo_vmware.api [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Task: {'id': task-3240471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351751} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.349919] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.350171] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.350366] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.350559] env[68674]: INFO nova.compute.manager [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Took 3.13 seconds to destroy the instance on the hypervisor. [ 970.350870] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.351188] env[68674]: DEBUG nova.compute.manager [-] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.351332] env[68674]: DEBUG nova.network.neutron [-] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.417269] env[68674]: DEBUG nova.network.neutron [-] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.495943] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c673feef-db0b-4f80-a6cc-97b6080b8812 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.505378] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d14fe9-7453-418e-883e-5b8997206c0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.524820] env[68674]: DEBUG nova.compute.utils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 970.529777] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 970.530082] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.530242] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 970.530453] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.530811] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 970.531148] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 970.531499] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 970.531706] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 970.531974] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 970.532314] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 
tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 970.532681] env[68674]: DEBUG nova.virt.hardware [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 970.538784] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 970.540801] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdab3899-b54a-4d6f-b22f-ee47cc5b1c95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.583744] env[68674]: DEBUG nova.compute.manager [req-8e9b8db5-f553-4f1b-b580-28d73097526e req-7e2945fc-3d7b-4e1e-8cc7-9698d2a6ba4c service nova] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Detach interface failed, port_id=d16f8563-f821-46f1-ae60-2096ac4a4486, reason: Instance 6af32e52-f10e-47be-ab36-e130614ba9e8 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 970.589919] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.591705] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 970.591705] env[68674]: value = "task-3240472" [ 970.591705] env[68674]: _type = "Task" [ 970.591705] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.603379] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240472, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.703115] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240469, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.866595] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd15e4a4-6d93-4387-af79-8672b0df7533 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.875393] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba25bb1-5bf4-490a-b708-d91e1ac85fa2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.909417] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e573f778-0172-4898-b15c-e6e12fee8281 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.919073] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d73c3aa-94c9-485e-a29e-3364b72ea7f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.922187] env[68674]: INFO nova.compute.manager [-] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Took 1.37 seconds to deallocate network for instance. [ 970.941584] env[68674]: DEBUG nova.compute.provider_tree [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.041659] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.029s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.052703] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.104625] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240472, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.151588] env[68674]: DEBUG nova.network.neutron [-] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.209044] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240469, 'name': ReconfigVM_Task, 'duration_secs': 1.283139} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.209341] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 691f9f14-4f53-46a4-8bf7-d027cfdd37e8/691f9f14-4f53-46a4-8bf7-d027cfdd37e8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.209990] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-980ab801-d927-416f-8d8a-a26dd9d4af00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.218714] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 971.218714] env[68674]: value = "task-3240473" [ 971.218714] env[68674]: _type = "Task" [ 971.218714] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.230751] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240473, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.430850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.444222] env[68674]: DEBUG nova.scheduler.client.report [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.554903] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.603883] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240472, 'name': ReconfigVM_Task, 'duration_secs': 0.851844} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.603883] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 971.605591] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d1fa16-dd4d-4c83-a173-677d0d4e975a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.640256] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701/ffdd1c62-1b4e-40cf-a27e-ff2877439701.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.640474] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.640800] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.640842] env[68674]: DEBUG nova.network.neutron [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.643138] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b83088a-9be4-4d24-a63e-594ceac79e4c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.657291] env[68674]: INFO nova.compute.manager [-] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Took 1.31 seconds to deallocate network for instance. [ 971.664145] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 971.664145] env[68674]: value = "task-3240474" [ 971.664145] env[68674]: _type = "Task" [ 971.664145] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.673133] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240474, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.736027] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240473, 'name': Rename_Task, 'duration_secs': 0.145675} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.736027] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.736027] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12bcb3b0-371f-40a7-8f36-aef7bdf413f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.747550] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 971.747550] env[68674]: value = "task-3240475" [ 971.747550] env[68674]: _type = "Task" [ 971.747550] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.756484] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240475, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.955478] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.955478] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.957s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.955478] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.955478] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 971.956644] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.662s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.959966] env[68674]: INFO nova.compute.claims [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.964940] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ec2611-4752-4ee4-a98e-da9dad396a43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.980614] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ce75c1-6ffe-424e-8481-438fd5655a03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.988479] env[68674]: INFO nova.scheduler.client.report [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Deleted allocations for instance 33313b29-abaf-4ff7-9182-abfcfb9b3220 [ 972.006793] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c788ce86-2dcc-45be-84b1-d10ff006309e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.015838] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de38be30-a467-4e27-a13f-0d23e908a5da 
{{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.053147] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179212MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 972.053326] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.060136] env[68674]: DEBUG nova.compute.manager [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Received event network-vif-deleted-68f46874-a1b2-4e2f-ab13-1dd822565a4e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.060591] env[68674]: DEBUG nova.compute.manager [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-vif-plugged-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.060591] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.060861] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.060861] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.062177] env[68674]: DEBUG nova.compute.manager [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] No waiting events found dispatching network-vif-plugged-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 972.062177] env[68674]: WARNING nova.compute.manager [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received unexpected event network-vif-plugged-2cf52206-a0c1-4b57-886d-23df69181f20 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 972.062177] env[68674]: DEBUG nova.compute.manager [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.062177] env[68674]: DEBUG nova.compute.manager [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing instance network info cache due to event network-changed-2cf52206-a0c1-4b57-886d-23df69181f20. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 972.062177] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] Acquiring lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.068757] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.111797] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.112101] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.112574] env[68674]: INFO nova.compute.manager [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Attaching volume 2110fa31-61e9-4ce5-a495-f1f566fee58d to /dev/sdb [ 972.163331] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.164200] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8129e840-4c69-4685-8701-635c6f431345 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.180506] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88908ee-e8cd-4b35-817a-c94205890453 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.183888] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240474, 'name': ReconfigVM_Task, 'duration_secs': 0.378866} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.184211] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701/ffdd1c62-1b4e-40cf-a27e-ff2877439701.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.184469] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 972.201897] env[68674]: DEBUG nova.virt.block_device [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating existing volume attachment record: a41c83c8-de0f-4cbd-b590-e6628cbf8a86 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 972.260474] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240475, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.512433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90dde390-96cc-4651-aa04-5dee38987e6b tempest-ServerMetadataTestJSON-693085752 tempest-ServerMetadataTestJSON-693085752-project-member] Lock "33313b29-abaf-4ff7-9182-abfcfb9b3220" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.720s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.564128] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.695282] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2af86b-8956-46fc-87a1-28a62d4a419d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.716918] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a19f0af-fbad-42a8-8d21-b4d656af0cdb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.735420] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 972.764472] env[68674]: DEBUG oslo_vmware.api [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240475, 'name': PowerOnVM_Task, 'duration_secs': 0.549562} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.764472] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.764472] env[68674]: INFO nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Took 8.60 seconds to spawn the instance on the hypervisor. 
[ 972.764786] env[68674]: DEBUG nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.766135] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d695e5-986c-42b4-9082-0d2c44540433 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.831858] env[68674]: DEBUG nova.network.neutron [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": "2cf52206-a0c1-4b57-886d-23df69181f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.068301] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.291254] env[68674]: INFO nova.compute.manager [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Took 35.04 seconds to build instance. 
[ 973.334970] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.341353] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] Acquired lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.341969] env[68674]: DEBUG nova.network.neutron [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Refreshing network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.351266] env[68674]: DEBUG nova.network.neutron [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Port 88af4dfc-59d6-4564-9ca9-d5383ed87da6 binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 973.371390] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e466f38b4a9130f1afbc0449a692dcef',container_format='bare',created_at=2025-04-03T08:12:30Z,direct_url=,disk_format='vmdk',id=563ae351-528c-4f48-afe4-222e4f9dee21,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-928580342-shelved',owner='2bca98e5a30741249b1bdee899ffe433',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-04-03T08:12:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 973.371390] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 973.371390] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 973.371669] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
973.371669] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 973.372079] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 973.372079] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 973.372693] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 973.373040] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 973.373255] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 973.373475] env[68674]: DEBUG nova.virt.hardware [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 973.375986] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c52ebc3-9448-4152-9846-237cf2c9ac64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.381508] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cb3beb-a0bb-43e4-9b24-f64b3e0e14c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.388434] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93864033-2526-444e-9548-1e4964b5ab44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.401370] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e035e90-da1a-49ce-bdba-8bd23658d8e3 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.414422] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:8f:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721c6720-3ce0-450e-9951-a894f03acc27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cf52206-a0c1-4b57-886d-23df69181f20', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.423143] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.424111] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.424509] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd554dc3-51eb-47be-8082-4f629121d7a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.465353] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704fff00-e8b7-41ed-9b0b-74977fbd78df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.469743] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.469743] env[68674]: value = "task-3240479" [ 973.469743] env[68674]: _type = "Task" [ 973.469743] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.475580] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbd4c99-e61b-4eaa-afcb-5855af0a4306 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.484724] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240479, 'name': CreateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.495937] env[68674]: DEBUG nova.compute.provider_tree [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.564942] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.797927] env[68674]: DEBUG oslo_concurrency.lockutils [None req-caec5e51-52b3-48b1-adb8-7db69982bd25 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.550s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.984230] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240479, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.004976] env[68674]: DEBUG nova.scheduler.client.report [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.065608] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.282492] env[68674]: DEBUG nova.network.neutron [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updated VIF entry in instance network info cache for port 2cf52206-a0c1-4b57-886d-23df69181f20. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.283261] env[68674]: DEBUG nova.network.neutron [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [{"id": "2cf52206-a0c1-4b57-886d-23df69181f20", "address": "fa:16:3e:be:8f:42", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cf52206-a0", "ovs_interfaceid": "2cf52206-a0c1-4b57-886d-23df69181f20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.389187] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.389534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.004s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.390115] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.446639] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} 
[ 974.446903] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.447137] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.447330] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.447528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.450384] env[68674]: INFO nova.compute.manager [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Terminating instance [ 974.484446] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240479, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.509081] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.509616] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 974.512664] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.476s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.516016] env[68674]: INFO nova.compute.claims [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.566684] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.785510] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2768e88-258d-4c80-9aa6-1184d71a4f89 req-c744eff0-78c8-4513-b032-b8f7c8db74dd service nova] Releasing lock "refresh_cache-63d6c185-db2c-4ede-a716-9a0dd432ab1f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.955736] env[68674]: DEBUG nova.compute.manager [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 974.956468] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 974.958791] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e80e8d0-e09a-4423-aa5a-7a75c0e8ad82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.968504] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.968791] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-359196de-2677-4d0f-a230-7a73a016c07e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.978511] env[68674]: DEBUG oslo_vmware.api [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 974.978511] env[68674]: value = "task-3240481" [ 974.978511] env[68674]: _type = "Task" [ 974.978511] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.988640] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240479, 'name': CreateVM_Task, 'duration_secs': 1.380205} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.988640] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.988640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.988640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.988640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 974.990920] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b59f839-958a-446f-88a9-5c5fd59a86a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.993339] env[68674]: DEBUG oslo_vmware.api [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.997535] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 974.997535] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5216b5b9-4a74-2616-2316-365b62b6be60" [ 974.997535] env[68674]: _type = "Task" [ 974.997535] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.006092] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5216b5b9-4a74-2616-2316-365b62b6be60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.019397] env[68674]: DEBUG nova.compute.utils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 975.024469] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 975.024703] env[68674]: DEBUG nova.network.neutron [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 975.074557] env[68674]: DEBUG oslo_vmware.api [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240460, 'name': ReconfigVM_Task, 'duration_secs': 7.49779} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.074557] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.074557] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfigured VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 975.101107] env[68674]: DEBUG nova.policy [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce343abf0f14bb5b5141c50113ccf6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61ea6bfeb37d470a970e9c98e4827ade', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 975.468679] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.468679] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 
tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.468679] env[68674]: DEBUG nova.network.neutron [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.490937] env[68674]: DEBUG oslo_vmware.api [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240481, 'name': PowerOffVM_Task, 'duration_secs': 0.300482} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.491268] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.491442] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.491705] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83da60c6-a1c9-4b05-ba7a-e2ebfda9ef01 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.512284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.512284] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Processing image 563ae351-528c-4f48-afe4-222e4f9dee21 {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.512284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.512284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.512284] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.512284] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18a9d66c-eb2f-4c48-90ca-d166df1c6729 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.522064] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.522064] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.522064] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99729678-c611-4c81-b796-ee0cf3faa071 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.528544] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 975.536251] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 975.536251] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529bde44-d7bb-4d28-f087-940dde9f7f8a" [ 975.536251] env[68674]: _type = "Task" [ 975.536251] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.550279] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529bde44-d7bb-4d28-f087-940dde9f7f8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.566828] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.566828] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.566828] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Deleting the datastore file [datastore1] 691f9f14-4f53-46a4-8bf7-d027cfdd37e8 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.566828] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4004b335-88a2-4c6a-9463-99006f369750 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.576233] env[68674]: DEBUG oslo_vmware.api [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for the task: (returnval){ [ 975.576233] env[68674]: value = "task-3240483" [ 975.576233] env[68674]: _type = "Task" [ 975.576233] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.591038] env[68674]: DEBUG oslo_vmware.api [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240483, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.628793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.629139] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.668671] env[68674]: DEBUG nova.network.neutron [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Successfully created port: 4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.672534] env[68674]: DEBUG nova.compute.manager [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-vif-deleted-861f9feb-a46d-4b29-851a-f958bd80ef86 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.672781] env[68674]: INFO nova.compute.manager [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Neutron deleted interface 861f9feb-a46d-4b29-851a-f958bd80ef86; detaching it from the instance and deleting it from the info cache [ 975.673102] env[68674]: DEBUG nova.network.neutron [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d4a0023c-9d88-48c5-9362-16dd8aca5a74", "address": "fa:16:3e:93:1a:51", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a0023c-9d", "ovs_interfaceid": "d4a0023c-9d88-48c5-9362-16dd8aca5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.886044] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2b44be-80a4-4ae5-9ec9-173dd590fa7f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.893982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f68e97c-d8ff-4169-9718-9a2f8ad625ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.925975] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98298626-c58d-4a27-8d76-16425da28203 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.933593] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c57f39-7428-4708-8362-2140609b98a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.950341] env[68674]: DEBUG nova.compute.provider_tree [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.964060] env[68674]: DEBUG nova.compute.manager [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-vif-deleted-d4a0023c-9d88-48c5-9362-16dd8aca5a74 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.966091] env[68674]: INFO nova.compute.manager [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Neutron deleted interface d4a0023c-9d88-48c5-9362-16dd8aca5a74; detaching it from the instance and deleting it from the info cache [ 975.966091] 
env[68674]: DEBUG nova.network.neutron [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.052353] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 976.052900] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Fetch image to [datastore1] OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb/OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 976.053239] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Downloading stream optimized image 563ae351-528c-4f48-afe4-222e4f9dee21 to [datastore1] OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb/OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb.vmdk on the data store datastore1 as vApp {{(pid=68674) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 976.053503] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Downloading image file data 563ae351-528c-4f48-afe4-222e4f9dee21 to the ESX as VM named 'OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb' {{(pid=68674) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 976.096390] env[68674]: DEBUG 
oslo_vmware.api [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Task: {'id': task-3240483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1359} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.099902] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.100167] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.102019] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.102019] env[68674]: INFO nova.compute.manager [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 976.102019] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.102630] env[68674]: DEBUG nova.compute.manager [-] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 976.102771] env[68674]: DEBUG nova.network.neutron [-] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.132341] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 976.179972] env[68674]: DEBUG oslo_concurrency.lockutils [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.179972] env[68674]: DEBUG oslo_concurrency.lockutils [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] Acquired lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.180559] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a520db-c558-49e7-a71d-b096b1c1918d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.184406] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 976.184406] env[68674]: value = "resgroup-9" [ 976.184406] env[68674]: _type = "ResourcePool" [ 976.184406] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 976.184837] env[68674]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a204bf58-7f2e-44c4-8634-68bdd9240cf8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.221364] env[68674]: DEBUG oslo_concurrency.lockutils [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] Releasing lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.221761] env[68674]: WARNING nova.compute.manager [req-577fc868-aa3a-4605-9ae7-0db8f4e981db req-2319c1c0-e742-4204-b3ab-d26fa51d575b service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Detach interface failed, port_id=861f9feb-a46d-4b29-851a-f958bd80ef86, reason: No device with interface-id 861f9feb-a46d-4b29-851a-f958bd80ef86 exists on VM: nova.exception.NotFound: No device with interface-id 861f9feb-a46d-4b29-851a-f958bd80ef86 exists on VM [ 976.223741] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease: (returnval){ [ 976.223741] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522a3f9a-f758-1cb0-9ec4-0f58147fca73" [ 976.223741] env[68674]: _type = "HttpNfcLease" [ 976.223741] env[68674]: } obtained for vApp import into resource pool (val){ [ 976.223741] env[68674]: value = "resgroup-9" [ 976.223741] env[68674]: _type = "ResourcePool" [ 976.223741] env[68674]: }. 
{{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 976.224149] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the lease: (returnval){ [ 976.224149] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522a3f9a-f758-1cb0-9ec4-0f58147fca73" [ 976.224149] env[68674]: _type = "HttpNfcLease" [ 976.224149] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 976.234887] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 976.234887] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522a3f9a-f758-1cb0-9ec4-0f58147fca73" [ 976.234887] env[68674]: _type = "HttpNfcLease" [ 976.234887] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 976.367358] env[68674]: DEBUG nova.network.neutron [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance_info_cache with network_info: [{"id": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "address": "fa:16:3e:92:28:03", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88af4dfc-59", "ovs_interfaceid": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.452909] env[68674]: DEBUG nova.scheduler.client.report [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.470472] env[68674]: DEBUG oslo_concurrency.lockutils [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 
service nova] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.470669] env[68674]: DEBUG oslo_concurrency.lockutils [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Acquired lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.471890] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52383a0b-df22-4a76-a82a-0e6f1e0ef68b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.493472] env[68674]: DEBUG nova.network.neutron [-] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.495907] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dae3642-b1f1-4fe1-905a-50134a538f0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.525036] env[68674]: DEBUG nova.virt.vmwareapi.vmops [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfiguring VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 976.525771] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-602380f5-c2fd-44cd-a714-2c9cfb6ce0f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.540107] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.540307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.540485] env[68674]: DEBUG nova.network.neutron [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.545533] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 976.549175] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Waiting for the task: (returnval){ [ 976.549175] env[68674]: value = "task-3240485" [ 976.549175] env[68674]: _type = "Task" [ 976.549175] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.558462] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.568473] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.658834] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.731696] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 976.731696] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522a3f9a-f758-1cb0-9ec4-0f58147fca73" [ 976.731696] env[68674]: _type = "HttpNfcLease" [ 976.731696] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 976.731998] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 976.731998] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522a3f9a-f758-1cb0-9ec4-0f58147fca73" [ 976.731998] env[68674]: _type = "HttpNfcLease" [ 976.731998] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 976.732726] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43df66b3-3009-43f5-832a-a86ee16ae172 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.744629] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a8adc8-2fa1-5074-7565-c57a366d8499/disk-0.vmdk from lease info. 
{{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 976.744961] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a8adc8-2fa1-5074-7565-c57a366d8499/disk-0.vmdk. {{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 976.826082] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f5119b38-7dec-4bc1-a8ed-606369ce7376 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.870764] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.959634] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.960228] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 976.963389] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.414s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.963549] env[68674]: DEBUG nova.objects.instance [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lazy-loading 'resources' on Instance uuid 082fd3a5-b30e-41cc-8fba-dab2802a1e3e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.000018] env[68674]: INFO nova.compute.manager [-] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Took 0.90 seconds to deallocate network for instance. [ 977.065047] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.269563] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 977.270228] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 977.273154] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272bd1e5-77e7-4f3d-99c6-877eef5c8e94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.310544] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cfaec0-4837-43a1-95b1-fec38481c56c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.340496] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] volume-2110fa31-61e9-4ce5-a495-f1f566fee58d/volume-2110fa31-61e9-4ce5-a495-f1f566fee58d.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.343210] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.343497] env[68674]: 
DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.343664] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.343848] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.344015] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.344574] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.344805] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.344965] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.345157] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.345329] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.345515] env[68674]: DEBUG nova.virt.hardware [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.348809] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-872557ac-f1fc-477b-b9aa-9a722c864d9f {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.365156] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be747745-339d-4556-a281-d8b1f91f2403 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.376619] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b6d20a-40c7-4495-8a17-c0ed177f77d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.384811] env[68674]: DEBUG oslo_vmware.api [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 977.384811] env[68674]: value = "task-3240486" [ 977.384811] env[68674]: _type = "Task" [ 977.384811] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.410169] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c23254c-9d63-4a60-a0fa-ffebd0159953 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.416816] env[68674]: DEBUG oslo_vmware.api [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240486, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.449409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3565206d-d080-468c-9b72-de6b2d0c640d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.457736] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 977.467075] env[68674]: DEBUG nova.compute.utils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 977.471683] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 977.471892] env[68674]: DEBUG nova.network.neutron [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 977.508453] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.570323] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.574485] env[68674]: DEBUG nova.policy [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43ee83a287f14c8a95499b6a16e33139', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d376e63760b4b708305a7b0aafd98a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 977.580923] env[68674]: DEBUG nova.network.neutron [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Successfully updated port: 4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.656776] env[68674]: DEBUG nova.network.neutron [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [{"id": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "address": "fa:16:3e:17:ab:03", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": 
"nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9b4902-f0", "ovs_interfaceid": "7d9b4902-f03b-4046-b4ba-0bc1296918da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.676536] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520d21c7-e2d9-8c3d-717a-dbd89cdd9f8a/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 977.677564] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d88e4a2-ca29-4d16-8cb9-67d67a84772c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.686480] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520d21c7-e2d9-8c3d-717a-dbd89cdd9f8a/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 977.686686] env[68674]: ERROR oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520d21c7-e2d9-8c3d-717a-dbd89cdd9f8a/disk-0.vmdk due to incomplete transfer. [ 977.686888] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ccccd4fd-11de-4c67-bc50-b9f3949f462b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.701346] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520d21c7-e2d9-8c3d-717a-dbd89cdd9f8a/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 977.701346] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Uploaded image fb80fca4-b398-4718-91d5-03a169cdace7 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 977.704397] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 977.704968] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-acf24545-f118-445a-ba38-3187921aa7ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.708431] env[68674]: DEBUG nova.compute.manager [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Received event network-vif-deleted-452f8db5-332d-4019-ac05-2ad8f6360a0b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.708575] env[68674]: DEBUG nova.compute.manager [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Received event network-vif-plugged-4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.708771] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.708975] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.709289] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.709379] env[68674]: DEBUG nova.compute.manager [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] No waiting events found dispatching network-vif-plugged-4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 977.710113] env[68674]: WARNING nova.compute.manager [req-c6b29cff-3992-4924-afe7-369b49d054a9 
req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Received unexpected event network-vif-plugged-4fae8d88-2aaa-48bd-b0c4-72bc768efce3 for instance with vm_state building and task_state spawning. [ 977.710113] env[68674]: DEBUG nova.compute.manager [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Received event network-changed-4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.710113] env[68674]: DEBUG nova.compute.manager [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Refreshing instance network info cache due to event network-changed-4fae8d88-2aaa-48bd-b0c4-72bc768efce3. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 977.710113] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.710113] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.710582] env[68674]: DEBUG nova.network.neutron [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Refreshing network info cache for port 4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 977.719826] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 977.719826] env[68674]: value = "task-3240487" [ 977.719826] env[68674]: _type = "Task" [ 977.719826] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.728780] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240487, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.887620] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fba51b-87e7-4b12-8e07-24d6a8c0bfa1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.908018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd4886e-f57b-4478-a46e-2b92f37d4306 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.919890] env[68674]: DEBUG oslo_vmware.api [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240486, 'name': ReconfigVM_Task, 'duration_secs': 0.507073} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.920630] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfigured VM instance instance-0000004d to attach disk [datastore1] volume-2110fa31-61e9-4ce5-a495-f1f566fee58d/volume-2110fa31-61e9-4ce5-a495-f1f566fee58d.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.954583] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48c95510-7a61-4df7-a2c3-c9e280ff36bb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.969019] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Completed reading data from the image iterator. {{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 977.969256] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a8adc8-2fa1-5074-7565-c57a366d8499/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 977.971050] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.971854] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073d2ecb-3910-45dc-b066-b168ef30037e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.975048] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b2d119-d33a-475f-9560-238810e751dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.981324] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 977.981324] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79f6c4b7-6934-48e9-a2c7-1a51e2ef49a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.985267] env[68674]: DEBUG oslo_vmware.api [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 977.985267] env[68674]: value = "task-3240488" [ 977.985267] env[68674]: _type = "Task" [ 977.985267] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.989126] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a8adc8-2fa1-5074-7565-c57a366d8499/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 977.989299] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a8adc8-2fa1-5074-7565-c57a366d8499/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 977.994388] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb303bc-b96e-4259-8f48-adaa7b929dad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.998996] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-8b0388ed-aa57-4c9a-b2c4-3fd46448bd95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.000512] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 978.000512] env[68674]: value = "task-3240489" [ 978.000512] env[68674]: _type = "Task" [ 978.000512] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.009453] env[68674]: DEBUG oslo_vmware.api [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240488, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.021093] env[68674]: DEBUG nova.compute.provider_tree [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.022610] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240489, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.028401] env[68674]: DEBUG nova.network.neutron [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Successfully created port: e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.062743] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.083973] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.159880] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.226319] env[68674]: DEBUG oslo_vmware.rw_handles [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a8adc8-2fa1-5074-7565-c57a366d8499/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 978.226567] env[68674]: INFO nova.virt.vmwareapi.images [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Downloaded image file data 563ae351-528c-4f48-afe4-222e4f9dee21 [ 978.227778] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d7dace-435d-4a44-b266-a4fac1d27843 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.233681] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240487, 'name': Destroy_Task, 'duration_secs': 0.450771} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.234339] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Destroyed the VM [ 978.234610] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 978.234860] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-69ca7297-6a31-483a-b0cb-dac591c236c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.248338] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b8f633b-8c16-401c-97ec-5fe26b3cd3f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.254604] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 978.254604] env[68674]: value = "task-3240490" [ 978.254604] env[68674]: _type = "Task" [ 978.254604] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.263026] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240490, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.265844] env[68674]: DEBUG nova.network.neutron [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.282766] env[68674]: INFO nova.virt.vmwareapi.images [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] The imported VM was unregistered [ 978.285299] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 978.285299] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating directory with path [datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.285992] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2aad9912-e016-421f-a7ec-6a7f4a62486b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.300527] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created directory with path [datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.300527] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb/OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb.vmdk to [datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk. {{(pid=68674) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 978.300722] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-4f053e8f-fd4f-4ddf-a940-cfb04e85eec7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.309014] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 978.309014] env[68674]: value = "task-3240492" [ 978.309014] env[68674]: _type = "Task" [ 978.309014] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.317133] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.368591] env[68674]: DEBUG nova.network.neutron [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.503332] env[68674]: DEBUG oslo_vmware.api [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240488, 'name': ReconfigVM_Task, 'duration_secs': 0.160055} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.507785] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 978.516484] env[68674]: DEBUG oslo_vmware.api [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240489, 'name': PowerOnVM_Task, 'duration_secs': 0.482321} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.517143] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.517143] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8639e043-f82c-486e-b6bc-9ecb5986a1e9 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance 'ffdd1c62-1b4e-40cf-a27e-ff2877439701' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 978.524780] env[68674]: DEBUG nova.scheduler.client.report [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.567143] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.664840] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6bb977a2-886e-43ec-a97f-f427092f1ded tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-0e7c5243-ad33-4391-8977-b9019643e3de-861f9feb-a46d-4b29-851a-f958bd80ef86" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.742s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.769899] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240490, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.818720] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.874988] env[68674]: DEBUG oslo_concurrency.lockutils [req-c6b29cff-3992-4924-afe7-369b49d054a9 req-ece83e77-e744-497e-b801-1aea49b41670 service nova] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.875387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.875602] env[68674]: DEBUG nova.network.neutron [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.996960] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 979.027393] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.027734] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.027908] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.028065] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Flavor pref 0:0:0 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.028217] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.028371] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.028580] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.028736] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.029420] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.029420] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.029420] env[68674]: DEBUG nova.virt.hardware [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.030823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.067s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.032935] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d29f7c-93a1-47ab-8511-9023b2a8bc2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.041529] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.691s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.041794] env[68674]: DEBUG nova.objects.instance [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lazy-loading 'resources' on Instance uuid 77fa5a89-961b-4c84-a75e-a5be0253677e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.052358] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fad522e-f0e3-489b-8bce-0fc37d0047ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.065067] env[68674]: INFO nova.scheduler.client.report [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Deleted allocations for instance 082fd3a5-b30e-41cc-8fba-dab2802a1e3e [ 979.078842] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.268231] env[68674]: DEBUG oslo_vmware.api [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240490, 'name': RemoveSnapshot_Task, 'duration_secs': 0.610077} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.268231] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 979.268433] env[68674]: INFO nova.compute.manager [None req-f65bd679-2a9c-4cda-a6fb-11bf4a153fdc tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Took 14.65 seconds to snapshot the instance on the hypervisor. [ 979.320302] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.559687] env[68674]: DEBUG nova.objects.instance [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'flavor' on Instance uuid 2d02adff-9fbf-4889-99e4-4efde5a51b33 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.575475] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.587887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ce24d966-e593-4c43-885e-64ce7013245f tempest-InstanceActionsNegativeTestJSON-1920051802 tempest-InstanceActionsNegativeTestJSON-1920051802-project-member] Lock "082fd3a5-b30e-41cc-8fba-dab2802a1e3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.763s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.715489] env[68674]: DEBUG nova.network.neutron [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 979.841994] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.904305] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d346a6-c8f1-4b71-a4de-4fd7b886f312 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.911410] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daffc8d8-1cd6-488f-be20-78dfa66aef21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.946132] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa536a85-839e-4674-98e3-21421f41b989 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.961993] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f420a31-1ac2-44f3-8367-44e927190379 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.978803] env[68674]: DEBUG nova.compute.provider_tree [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.073583] env[68674]: DEBUG oslo_concurrency.lockutils [None req-41d55833-31f4-4777-baef-bb0a6da9377e tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.961s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.074586] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.195937] env[68674]: DEBUG nova.network.neutron [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.320869] env[68674]: DEBUG nova.compute.manager [req-f5b2a422-a70a-45b9-b133-565a5d11177c req-c975dc26-257c-4c85-8b32-5f28ec2e70bb service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Received event network-vif-plugged-e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.321198] env[68674]: DEBUG oslo_concurrency.lockutils [req-f5b2a422-a70a-45b9-b133-565a5d11177c req-c975dc26-257c-4c85-8b32-5f28ec2e70bb service nova] Acquiring lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.321467] env[68674]: DEBUG oslo_concurrency.lockutils [req-f5b2a422-a70a-45b9-b133-565a5d11177c req-c975dc26-257c-4c85-8b32-5f28ec2e70bb service nova] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.321646] env[68674]: DEBUG oslo_concurrency.lockutils [req-f5b2a422-a70a-45b9-b133-565a5d11177c req-c975dc26-257c-4c85-8b32-5f28ec2e70bb service nova] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.321814] env[68674]: DEBUG nova.compute.manager [req-f5b2a422-a70a-45b9-b133-565a5d11177c req-c975dc26-257c-4c85-8b32-5f28ec2e70bb service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] No waiting events found dispatching network-vif-plugged-e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 980.322196] env[68674]: WARNING nova.compute.manager [req-f5b2a422-a70a-45b9-b133-565a5d11177c req-c975dc26-257c-4c85-8b32-5f28ec2e70bb service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Received unexpected event network-vif-plugged-e6a3416c-8601-4d3f-8b5b-74d43a100d6c for instance with vm_state building and task_state spawning. [ 980.329719] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.380915] env[68674]: DEBUG nova.network.neutron [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Successfully updated port: e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.485577] env[68674]: DEBUG nova.scheduler.client.report [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 980.574580] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.698944] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.699337] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Instance network_info: |[{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 980.699770] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:a7:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fae8d88-2aaa-48bd-b0c4-72bc768efce3', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.707869] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.708156] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.708390] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-778724cf-16b4-4bb4-8668-04789b7da1c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.730498] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.730498] env[68674]: value = "task-3240493" [ 980.730498] env[68674]: _type = "Task" [ 980.730498] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.738994] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240493, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.823462] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.867028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.869092] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.869092] env[68674]: DEBUG nova.compute.manager [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Going to confirm migration 5 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 980.888112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.888112] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquired lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.888112] env[68674]: DEBUG nova.network.neutron [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 980.994307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.994307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.782s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.998083] env[68674]: INFO nova.compute.claims [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.025062] env[68674]: INFO nova.scheduler.client.report [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Deleted allocations for instance 77fa5a89-961b-4c84-a75e-a5be0253677e [ 981.074412] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.240875] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240493, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.289843] env[68674]: INFO nova.compute.manager [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Rebuilding instance [ 981.324876] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240492, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.565587} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.325248] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb/OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb.vmdk to [datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk. [ 981.325519] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Cleaning up location [datastore1] OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 981.325766] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_87b7359c-e5de-4d54-b220-7d8a3353fedb {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.326056] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ade95af-3751-4236-8338-c9ba158462b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.339920] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 981.339920] env[68674]: value = "task-3240494" [ 981.339920] env[68674]: _type = "Task" [ 981.339920] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.346524] env[68674]: DEBUG nova.compute.manager [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.347773] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac19f0c6-de28-4833-978a-18fb47aa7ce1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.361679] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240494, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.460854] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.460854] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.460854] env[68674]: DEBUG nova.network.neutron [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.460854] env[68674]: DEBUG nova.objects.instance [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'info_cache' on Instance uuid ffdd1c62-1b4e-40cf-a27e-ff2877439701 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.473211] env[68674]: DEBUG nova.network.neutron [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.532680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cb8741f-f1db-4a0d-8d1d-3b1f55754d0f tempest-MigrationsAdminTest-360829146 tempest-MigrationsAdminTest-360829146-project-member] Lock "77fa5a89-961b-4c84-a75e-a5be0253677e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.697s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.574940] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.722484] env[68674]: DEBUG nova.network.neutron [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Updating instance_info_cache with network_info: [{"id": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "address": "fa:16:3e:28:43:ab", "network": {"id": "4afa0a78-13f3-4d61-91ab-0ff686045241", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1736462154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d376e63760b4b708305a7b0aafd98a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a3416c-86", "ovs_interfaceid": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.742342] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240493, 'name': CreateVM_Task, 'duration_secs': 0.541085} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.742540] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.743277] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.743448] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.743758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 981.744015] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f36e054-3596-4555-8b99-e090074123df {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.749385] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 981.749385] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bcaee7-12fe-dc01-6f6e-fe461233069b" [ 981.749385] env[68674]: _type = "Task" [ 981.749385] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.758423] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bcaee7-12fe-dc01-6f6e-fe461233069b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.854342] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144888} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.854608] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 981.854777] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.855041] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk to [datastore1] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.855318] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3a88834-11fb-4af8-ad87-1cd4c66c50c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.865017] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 981.865017] env[68674]: value = "task-3240495" [ 981.865017] env[68674]: _type = "Task" [ 981.865017] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.874395] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.078974] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.225763] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Releasing lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.226627] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Instance network_info: |[{"id": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "address": "fa:16:3e:28:43:ab", "network": {"id": "4afa0a78-13f3-4d61-91ab-0ff686045241", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1736462154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d376e63760b4b708305a7b0aafd98a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a3416c-86", "ovs_interfaceid": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 982.226627] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:43:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6a3416c-8601-4d3f-8b5b-74d43a100d6c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.235793] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Creating folder: Project (2d376e63760b4b708305a7b0aafd98a3). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 982.238980] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9867ce97-b4f6-405e-b416-ff0f5628a959 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.258688] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Created folder: Project (2d376e63760b4b708305a7b0aafd98a3) in parent group-v647377. [ 982.258896] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Creating folder: Instances. Parent ref: group-v647631. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 982.259173] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbca5d66-85d9-4d4e-8dd2-814161d716bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.267065] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bcaee7-12fe-dc01-6f6e-fe461233069b, 'name': SearchDatastore_Task, 'duration_secs': 0.067317} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.270060] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.270374] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.270623] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.270773] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.270974] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 
tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.272661] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f41a1d5-ce14-4ba5-b3a6-b121b42d2226 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.274657] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Created folder: Instances in parent group-v647631. [ 982.274888] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.275100] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.276036] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c457e8ff-6a49-4e59-84ca-3f7d597f51c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.296042] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.296042] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.297075] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d28477b9-adc7-41e9-a4f7-a41be56d22d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.304177] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.304177] env[68674]: value = "task-3240498" [ 982.304177] env[68674]: _type = "Task" [ 982.304177] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.307741] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 982.307741] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dea707-cb43-5144-8886-8a41d84ab2b3" [ 982.307741] env[68674]: _type = "Task" [ 982.307741] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.317822] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240498, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.323836] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dea707-cb43-5144-8886-8a41d84ab2b3, 'name': SearchDatastore_Task, 'duration_secs': 0.012306} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.324742] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13d53d3a-f673-40cc-9976-2871a5f79dbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.332802] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 982.332802] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289e6ff-4b56-b382-9b7b-e76e8b60cbda" [ 982.332802] env[68674]: _type = "Task" [ 982.332802] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.342062] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289e6ff-4b56-b382-9b7b-e76e8b60cbda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.343199] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3c5c7e-09eb-4571-b119-07aff06f82a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.352480] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d288c596-8ee8-49a1-a16d-acfbce5ffd16 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.389812] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.391653] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4846d583-436f-4289-966e-12925711ce2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.398841] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117dd21a-bf01-417b-a935-9aa3df298a12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.403172] env[68674]: DEBUG nova.compute.manager [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Received event network-changed-e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 982.403172] env[68674]: DEBUG nova.compute.manager [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Refreshing instance network info cache due to event network-changed-e6a3416c-8601-4d3f-8b5b-74d43a100d6c. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 982.403172] env[68674]: DEBUG oslo_concurrency.lockutils [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] Acquiring lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.403360] env[68674]: DEBUG oslo_concurrency.lockutils [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] Acquired lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.403507] env[68674]: DEBUG nova.network.neutron [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Refreshing network info cache for port e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.414407] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.417683] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5411d5dc-235d-40dd-a67a-01922ddabe4a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.422335] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 982.422335] env[68674]: value = "task-3240499" [ 982.422335] env[68674]: _type = "Task" [ 982.422335] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.437052] env[68674]: DEBUG nova.compute.manager [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.437659] env[68674]: DEBUG nova.compute.provider_tree [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.440049] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b76ed0-083a-4246-ab68-197e80309c4a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.447339] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240499, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.578367] env[68674]: DEBUG oslo_vmware.api [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Task: {'id': task-3240485, 'name': ReconfigVM_Task, 'duration_secs': 5.788821} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.578761] env[68674]: DEBUG oslo_concurrency.lockutils [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] Releasing lock "0e7c5243-ad33-4391-8977-b9019643e3de" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.578942] env[68674]: DEBUG nova.virt.vmwareapi.vmops [req-c29b446a-a0ec-431c-ac3c-219cefda2c25 req-672d1330-33b3-464d-920b-ad2a373a1177 service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Reconfigured VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 982.579489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "0e7c5243-ad33-4391-8977-b9019643e3de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.011s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.580183] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.580183] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.580183] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "0e7c5243-ad33-4391-8977-b9019643e3de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.585193] env[68674]: INFO nova.compute.manager [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Terminating instance [ 982.748537] env[68674]: DEBUG oslo_concurrency.lockutils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.748796] env[68674]: DEBUG oslo_concurrency.lockutils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.817624] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240498, 'name': CreateVM_Task, 'duration_secs': 0.430339} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.817864] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.818772] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.818974] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.819363] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 982.819591] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aba68db3-3703-4cb0-b94d-a59db0580b17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.824060] env[68674]: DEBUG nova.network.neutron [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance_info_cache with network_info: [{"id": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "address": "fa:16:3e:92:28:03", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88af4dfc-59", "ovs_interfaceid": "88af4dfc-59d6-4564-9ca9-d5383ed87da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.827467] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 982.827467] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b2caf0-3367-9c8d-c83b-93a54fd1a4a9" [ 982.827467] env[68674]: _type = "Task" [ 982.827467] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.843981] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b2caf0-3367-9c8d-c83b-93a54fd1a4a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.855928] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5289e6ff-4b56-b382-9b7b-e76e8b60cbda, 'name': SearchDatastore_Task, 'duration_secs': 0.018828} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.856179] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.856442] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.856718] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe7685f3-b5ce-472e-b39a-e77359011ac0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.867323] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 982.867323] env[68674]: value = "task-3240500" [ 982.867323] env[68674]: _type = "Task" [ 982.867323] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.878261] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.906395] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.934788] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240499, 'name': PowerOffVM_Task, 'duration_secs': 0.264866} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.935294] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.949764] env[68674]: DEBUG nova.scheduler.client.report [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.956440] env[68674]: INFO nova.compute.manager [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] instance snapshotting [ 982.959147] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c3f627-359d-4d32-bac9-fab46e089103 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.988857] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfacf8b-de46-4603-94b4-c16fdf649c92 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.005121] env[68674]: INFO nova.compute.manager [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Detaching volume 2110fa31-61e9-4ce5-a495-f1f566fee58d [ 983.041844] env[68674]: INFO nova.virt.block_device [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Attempting to driver detach volume 2110fa31-61e9-4ce5-a495-f1f566fee58d from mountpoint /dev/sdb [ 983.042705] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 983.042705] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 983.043331] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce58d8c-dbcc-4bc4-a786-5fbd6371439d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.073885] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9efcdf-1afd-4c28-9926-9bf1bc9a02f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.086401] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee972961-eb02-4b04-92f7-96c6ac8d33d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.112177] env[68674]: DEBUG nova.compute.manager [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 983.112514] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.116937] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40141737-e234-4364-9aa6-c7b95a2106b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.120936] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d176c71-cf11-4cc3-bf9f-0c77b821b86f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.145308] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] The volume has not been displaced from its original location: [datastore1] volume-2110fa31-61e9-4ce5-a495-f1f566fee58d/volume-2110fa31-61e9-4ce5-a495-f1f566fee58d.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 983.151078] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 983.154286] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ee621d8-e530-473f-a9ae-754011bceed7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.168434] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.168744] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7775d2f-c1f3-4c5d-8145-3d56e0679d2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.177550] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 983.177550] env[68674]: value = "task-3240501" [ 983.177550] env[68674]: _type = "Task" [ 983.177550] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.179431] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 983.179431] env[68674]: value = "task-3240502" [ 983.179431] env[68674]: _type = "Task" [ 983.179431] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.193610] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240502, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.197586] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240501, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.252915] env[68674]: DEBUG nova.compute.utils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 983.329434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-ffdd1c62-1b4e-40cf-a27e-ff2877439701" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.329715] env[68674]: DEBUG nova.objects.instance [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'migration_context' on Instance uuid ffdd1c62-1b4e-40cf-a27e-ff2877439701 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.335919] env[68674]: DEBUG nova.network.neutron [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Updated VIF entry in instance network info cache for port e6a3416c-8601-4d3f-8b5b-74d43a100d6c. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.336555] env[68674]: DEBUG nova.network.neutron [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Updating instance_info_cache with network_info: [{"id": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "address": "fa:16:3e:28:43:ab", "network": {"id": "4afa0a78-13f3-4d61-91ab-0ff686045241", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1736462154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d376e63760b4b708305a7b0aafd98a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a3416c-86", "ovs_interfaceid": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.345055] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b2caf0-3367-9c8d-c83b-93a54fd1a4a9, 'name': SearchDatastore_Task, 'duration_secs': 0.0894} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.346332] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.346575] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.346819] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.347051] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.347253] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.347816] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67bd6f64-cbd6-4845-acd5-53f652282cb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.368263] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.368519] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.372671] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477c09de-f0a6-475a-ab42-b8005f0bab3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.383803] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.384256] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 983.384256] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cdbd2b-dced-d984-3c49-5fbd8b9c6301" [ 983.384256] env[68674]: _type = "Task" [ 983.384256] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.403571] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cdbd2b-dced-d984-3c49-5fbd8b9c6301, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.412755] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.455960] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.457769] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.437s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.459264] env[68674]: INFO nova.compute.claims [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.503414] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 983.503808] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-530ffb52-eba5-44c6-8cc6-29e3303a8606 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.514312] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 983.514312] env[68674]: value = "task-3240503" [ 983.514312] env[68674]: _type = "Task" [ 983.514312] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.525626] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240503, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.700061] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240501, 'name': PowerOffVM_Task, 'duration_secs': 0.479201} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.700402] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240502, 'name': ReconfigVM_Task, 'duration_secs': 0.481764} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.700690] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.700860] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.701196] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 983.706169] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65004a56-b96e-4e83-9729-f05cd6574930 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.707985] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1040de6f-3a82-400e-a465-1782e4b6b8f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.729615] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 983.729615] env[68674]: value = "task-3240505" [ 983.729615] env[68674]: _type = "Task" [ 983.729615] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.742255] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240505, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.757411] env[68674]: DEBUG oslo_concurrency.lockutils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.837592] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.837862] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.838089] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleting the datastore file [datastore1] 0e7c5243-ad33-4391-8977-b9019643e3de {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.838984] env[68674]: DEBUG nova.objects.base [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 983.839498] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cadedd9c-6699-4dab-960e-82ab946c71c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.842669] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5386e895-14b1-4417-bc3e-ad68e8c34c90 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.846078] env[68674]: DEBUG oslo_concurrency.lockutils [req-32c3e044-c9ad-40fc-a1f4-6b8abf05d032 req-778b842b-d747-46f3-88de-100f626cc2db service nova] Releasing lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.867558] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d2fe065-8f96-4b97-83aa-ea5efa9353e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.870864] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 983.870864] env[68674]: value = "task-3240506" [ 983.870864] env[68674]: _type = "Task" [ 983.870864] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.881210] env[68674]: DEBUG oslo_vmware.api [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 983.881210] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b67c1-0a8a-06c0-7685-4943469fa2d2" [ 983.881210] env[68674]: _type = "Task" [ 983.881210] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.892347] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240506, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.892758] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.904477] env[68674]: DEBUG oslo_vmware.api [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b67c1-0a8a-06c0-7685-4943469fa2d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011421} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.905737] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.912490] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cdbd2b-dced-d984-3c49-5fbd8b9c6301, 'name': SearchDatastore_Task, 'duration_secs': 0.095043} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.917175] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.917369] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e210c09-7634-4f61-af11-6c16d7ca45a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.924947] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 983.924947] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52272b37-281e-0387-8657-20ddaf8ce098" [ 983.924947] env[68674]: _type = "Task" [ 983.924947] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.937266] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52272b37-281e-0387-8657-20ddaf8ce098, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.963350] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "9949053a-6b4a-4d66-bb49-b0228d63e9ca" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.963775] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "9949053a-6b4a-4d66-bb49-b0228d63e9ca" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.027104] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240503, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.242654] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240505, 'name': ReconfigVM_Task, 'duration_secs': 0.213722} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.243012] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 984.388732] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.392302] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240506, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.403760] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.438486] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52272b37-281e-0387-8657-20ddaf8ce098, 'name': SearchDatastore_Task, 'duration_secs': 0.089617} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.438949] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.439261] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] fa8c58b7-a462-437f-b1ed-57fef6aa3903/fa8c58b7-a462-437f-b1ed-57fef6aa3903.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.439551] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3aa240bc-a15c-4a7c-b223-0217ffff258b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.448797] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 984.448797] env[68674]: value = "task-3240507" [ 984.448797] env[68674]: _type = "Task" [ 984.448797] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.460929] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240507, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.468116] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "9949053a-6b4a-4d66-bb49-b0228d63e9ca" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.468722] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 984.534276] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240503, 'name': CreateSnapshot_Task, 'duration_secs': 0.640747} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.534481] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 984.537123] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62201886-989c-4945-97da-37de75b166f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.829249] env[68674]: DEBUG oslo_concurrency.lockutils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.829249] env[68674]: DEBUG oslo_concurrency.lockutils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.829249] env[68674]: INFO nova.compute.manager [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Attaching volume 09b1b6e3-ad70-4884-a142-77859302b0e3 to /dev/sdb [ 984.883822] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a380ff76-9a50-49e2-b6b4-f6535bc96d29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.893851] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de42f73d-90e9-46dc-bd0b-01bed71f61a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.899847] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240500, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.899847] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240506, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.906236] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2708a4ad-043a-488b-812b-ccf40382ab1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.916247] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240495, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.867794} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.917694] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f74c0fc-4528-481d-8eba-f3675ad158fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.921157] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/563ae351-528c-4f48-afe4-222e4f9dee21/563ae351-528c-4f48-afe4-222e4f9dee21.vmdk to [datastore1] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 984.950440] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faba6e22-285a-4eaa-b84e-fd17ec1724ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.957259] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9990bef0-122c-4f5b-b42a-a89011445eb2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.967028] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240507, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.983775] env[68674]: DEBUG nova.compute.utils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 984.995697] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.996478] env[68674]: DEBUG nova.virt.block_device [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updating existing volume attachment record: af69ecf5-57db-4446-92a1-9f5faf3c293f {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 984.999782] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 984.999983] env[68674]: DEBUG nova.network.neutron [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.001881] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a125b99f-f972-4f9b-8570-f0411204df2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.018647] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d477051d-e3c4-4f96-938e-81e461e3a06b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.044772] env[68674]: DEBUG nova.compute.provider_tree [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.046855] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 985.046855] env[68674]: value = "task-3240508" [ 985.046855] env[68674]: _type = "Task" [ 985.046855] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.058710] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240508, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.069665] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 985.070359] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e3ead188-3d25-49e7-a52f-1028bf413615 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.083379] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 985.083379] env[68674]: value = "task-3240509" [ 985.083379] env[68674]: _type = "Task" [ 985.083379] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.089219] env[68674]: DEBUG nova.policy [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e95dcdda871463d82668b3aca7d7728', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75837b2623e94e5b898b49a24f9d842e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 985.099713] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240509, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.326592] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.326856] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2a0c3b9-dd85-4598-91c4-061b62d0ee59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.338647] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 985.338647] env[68674]: value = "task-3240511" [ 985.338647] env[68674]: _type = "Task" [ 985.338647] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.353332] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 985.353519] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 985.353914] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 985.356057] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933af986-9436-41ce-bef7-2ba932bd683f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.393982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90c5a42-5771-4af5-8602-522db8b8fb24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.406662] env[68674]: WARNING nova.virt.vmwareapi.driver [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 985.406848] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.407191] env[68674]: DEBUG oslo_vmware.api [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.479456} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.410905] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8faebeb-e859-48f5-a33f-72b084434a82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.414052] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.415101] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.415101] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.415101] env[68674]: INFO nova.compute.manager [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Took 2.30 seconds to destroy the instance on the hypervisor. [ 985.415101] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 985.415497] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240500, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.290993} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.415543] env[68674]: DEBUG nova.compute.manager [-] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 985.415993] env[68674]: DEBUG nova.network.neutron [-] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.417513] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.417870] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.418652] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d32894ac-1c32-4c02-91d9-db8b6317d909 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.424209] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.425698] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5cfb247-59c9-44a7-ba28-2e21f05ca291 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.427765] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 985.427765] env[68674]: value = "task-3240512" [ 985.427765] env[68674]: _type = "Task" [ 985.427765] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.439556] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240512, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.469723] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240507, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.500214] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 985.543449] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.543771] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.543957] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore file [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.544756] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-131df5e4-3254-47e1-af5a-48eff9a4963a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.549397] env[68674]: DEBUG nova.scheduler.client.report [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.559020] env[68674]: DEBUG nova.network.neutron [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Successfully created port: 7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.564821] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 985.564821] env[68674]: value = "task-3240514" [ 985.564821] env[68674]: _type = "Task" [ 985.564821] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.572987] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240508, 'name': ReconfigVM_Task, 'duration_secs': 0.382289} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.573975] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 63d6c185-db2c-4ede-a716-9a0dd432ab1f/63d6c185-db2c-4ede-a716-9a0dd432ab1f.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.575973] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'size': 0, 'encryption_format': None, 'device_name': '/dev/sda', 'encrypted': False, 'boot_index': 0, 'disk_bus': None, 'encryption_options': None, 'guest_format': None, 'encryption_secret_uuid': None, 'image_id': 'b84d9354-ef6b-46ca-9dae-6549fa89bbea'}], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'attachment_id': '537fb4ee-1f3e-47c4-9a07-7126b0159a5c', 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647612', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'name': 'volume-268d6a73-58d5-4541-bfb5-15e693956d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '63d6c185-db2c-4ede-a716-9a0dd432ab1f', 'attached_at': '', 'detached_at': '', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'serial': '268d6a73-58d5-4541-bfb5-15e693956d5f'}, 'boot_index': None, 'disk_bus': None, 'guest_format': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=68674) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 985.576237] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 985.576446] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647612', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'name': 'volume-268d6a73-58d5-4541-bfb5-15e693956d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '63d6c185-db2c-4ede-a716-9a0dd432ab1f', 'attached_at': '', 'detached_at': '', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'serial': '268d6a73-58d5-4541-bfb5-15e693956d5f'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 985.582664] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcf81e1-b72a-40c6-b9dd-a96de6f7d864 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.586796] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240514, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.617020] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240509, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.617802] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95d87ba-1f86-45dc-b1a9-c340ab131a12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.649951] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] volume-268d6a73-58d5-4541-bfb5-15e693956d5f/volume-268d6a73-58d5-4541-bfb5-15e693956d5f.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.649951] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72d43d94-a8a2-48c8-8bb4-9c3cc22242ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.674066] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 985.674066] env[68674]: value = "task-3240515" [ 985.674066] env[68674]: _type = "Task" [ 985.674066] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.686777] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240515, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.947154] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.395244} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.949021] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.949021] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0062d21-9cd7-40e9-8e0f-a84f9f963a9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.985476] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.989332] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a83719d5-1b8f-4c82-96af-afda8e1df3fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.004901] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240507, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.364104} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.009011] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] fa8c58b7-a462-437f-b1ed-57fef6aa3903/fa8c58b7-a462-437f-b1ed-57fef6aa3903.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 986.009208] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.010227] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6d3f76a-88a5-4cf1-bff0-e550f4cbd27b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.017673] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 986.017673] env[68674]: value = "task-3240518" [ 986.017673] env[68674]: _type = "Task" [ 986.017673] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.024588] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 986.024588] env[68674]: value = "task-3240519" [ 986.024588] env[68674]: _type = "Task" [ 986.024588] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.038694] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.044362] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240519, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.064436] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.065046] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 986.069496] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.639s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.069739] env[68674]: DEBUG nova.objects.instance [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid 6af32e52-f10e-47be-ab36-e130614ba9e8 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.082463] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.467136} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.082716] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.082917] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.083089] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.099252] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240509, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.187415] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.512595] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 986.540104] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103649} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.543925] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.544337] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240518, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.545132] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ed10e3-ee5a-49c6-9f15-6f34369d58ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.578850] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] fa8c58b7-a462-437f-b1ed-57fef6aa3903/fa8c58b7-a462-437f-b1ed-57fef6aa3903.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.581392] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 986.581651] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.581809] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 986.581990] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.582156] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 986.582495] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
986.582537] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 986.582669] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 986.582839] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 986.582998] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 986.584095] env[68674]: DEBUG nova.virt.hardware [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 986.584556] env[68674]: DEBUG nova.compute.utils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 986.589042] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a06eb12b-5221-4c33-9fc7-80a71c577fac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.606516] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d93fb4-a130-46b7-be3a-1e979a82ad9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.612211] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 986.612211] env[68674]: DEBUG nova.network.neutron [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.615878] env[68674]: INFO nova.virt.block_device [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Booting with volume 2110fa31-61e9-4ce5-a495-f1f566fee58d at /dev/sdb [ 986.634253] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed0554f-ef0c-496c-8523-a769bd0bf761 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.639170] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 986.639170] env[68674]: value = "task-3240520" [ 986.639170] env[68674]: _type = "Task" [ 986.639170] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.639412] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240509, 'name': CloneVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.663052] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240520, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.676037] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75f4ece4-cc7a-4fdd-bdb0-85ae7cef3364 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.701507] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240515, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.705159] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ea81a3-b111-4c9d-9ef9-d49681d9464a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.750751] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8551a38c-bef5-4512-9ae7-1ab5b55c26c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.755762] env[68674]: DEBUG nova.policy [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f60721e0c7d04d87b9deacce82e06403', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f68fc55ea344ea6961e31e5d57736a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 986.762653] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f81b4b-5bb1-4f69-93c8-645edf8f1335 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.807833] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a559e3-50ba-4e36-a8e7-3d245d8f0666 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.816046] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2efd5e-22f6-492b-87c9-721031d6e928 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.832935] env[68674]: DEBUG nova.compute.manager [req-ca4a9118-0e03-465f-bf93-811b49431df4 req-aa8a6ad5-310d-46f1-b22d-40f88cd2bcbf service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Received event network-vif-deleted-7d9b4902-f03b-4046-b4ba-0bc1296918da {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 986.833161] env[68674]: INFO nova.compute.manager [req-ca4a9118-0e03-465f-bf93-811b49431df4 req-aa8a6ad5-310d-46f1-b22d-40f88cd2bcbf service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Neutron deleted interface 7d9b4902-f03b-4046-b4ba-0bc1296918da; detaching it from the instance and deleting it from the info cache [ 986.833372] env[68674]: DEBUG nova.network.neutron [req-ca4a9118-0e03-465f-bf93-811b49431df4 req-aa8a6ad5-310d-46f1-b22d-40f88cd2bcbf service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.839113] env[68674]: DEBUG nova.virt.block_device [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating existing volume attachment record: 
dc3d0db4-641f-4701-8e75-71558aa36cce {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 986.996418] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b510d4-8057-4582-b406-d76d8e5be807 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.005219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42788359-f69d-4828-a98e-b91b3e0e03da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.038577] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690c8aa4-291f-40b7-bab3-0d077db54701 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.048717] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240518, 'name': ReconfigVM_Task, 'duration_secs': 0.684817} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.050898] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.051599] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee83c94b-4205-4a23-8898-3d887864f74a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.056019] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373fa43e-4289-420f-aafb-b33a659d8814 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.070329] env[68674]: DEBUG nova.compute.provider_tree [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.074138] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 987.074138] env[68674]: value = "task-3240521" [ 987.074138] env[68674]: _type = "Task" [ 987.074138] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.082584] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240521, 'name': Rename_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.111467] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 987.128411] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240509, 'name': CloneVM_Task, 'duration_secs': 1.568866} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.128411] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Created linked-clone VM from snapshot [ 987.128411] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a795b60f-9016-4c56-9c9a-9921de2ad7f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.136482] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Uploading image e1a081da-3f4c-4a57-ab24-e5928d86e493 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 987.151015] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240520, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.168463] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 987.168463] env[68674]: value = "vm-647635" [ 987.168463] env[68674]: _type = "VirtualMachine" [ 987.168463] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 987.168736] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7fd23849-70ce-4a45-8e04-2177fb76d547 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.178059] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lease: (returnval){ [ 987.178059] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52483a42-1ac8-b277-c57b-7b885be86900" [ 987.178059] env[68674]: _type = "HttpNfcLease" [ 987.178059] env[68674]: } obtained for exporting VM: (result){ [ 987.178059] env[68674]: value = "vm-647635" [ 987.178059] env[68674]: _type = "VirtualMachine" [ 987.178059] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 987.178432] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the lease: (returnval){ [ 987.178432] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52483a42-1ac8-b277-c57b-7b885be86900" [ 987.178432] env[68674]: _type = "HttpNfcLease" [ 987.178432] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 987.189314] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 987.189314] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52483a42-1ac8-b277-c57b-7b885be86900" [ 987.189314] env[68674]: _type = "HttpNfcLease" [ 987.189314] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 987.193111] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240515, 'name': ReconfigVM_Task, 'duration_secs': 1.037187} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.193111] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfigured VM instance instance-00000041 to attach disk [datastore1] volume-268d6a73-58d5-4541-bfb5-15e693956d5f/volume-268d6a73-58d5-4541-bfb5-15e693956d5f.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.198355] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28c4af76-79f3-4dff-ab14-9b67f31d6a77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.217447] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 987.217447] env[68674]: value = "task-3240523" [ 987.217447] env[68674]: _type = "Task" [ 987.217447] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.226546] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240523, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.323011] env[68674]: DEBUG nova.network.neutron [-] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.339396] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c5042f0-4014-4895-8208-d800512ca317 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.352474] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02851932-3529-46df-b690-9a97789ee7cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.392159] env[68674]: DEBUG nova.compute.manager [req-ca4a9118-0e03-465f-bf93-811b49431df4 req-aa8a6ad5-310d-46f1-b22d-40f88cd2bcbf service nova] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Detach interface failed, port_id=7d9b4902-f03b-4046-b4ba-0bc1296918da, reason: Instance 0e7c5243-ad33-4391-8977-b9019643e3de could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 987.392864] env[68674]: DEBUG nova.network.neutron [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Successfully created port: 90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.578021] env[68674]: DEBUG nova.scheduler.client.report [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.590983] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240521, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.654134] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240520, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.692493] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 987.692493] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52483a42-1ac8-b277-c57b-7b885be86900" [ 987.692493] env[68674]: _type = "HttpNfcLease" [ 987.692493] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 987.692849] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 987.692849] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52483a42-1ac8-b277-c57b-7b885be86900" [ 987.692849] env[68674]: _type = "HttpNfcLease" [ 987.692849] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 987.693683] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e7d14d-cb3c-4801-9cee-fd2dd36ccdb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.703483] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52233f04-91fe-1bb3-2b6e-259ece069ec1/disk-0.vmdk from lease info. 
{{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 987.703716] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52233f04-91fe-1bb3-2b6e-259ece069ec1/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 987.784136] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240523, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.808372] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c84c4b66-ba87-41f4-9e9a-afc6c5cabd99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.827690] env[68674]: INFO nova.compute.manager [-] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Took 2.41 seconds to deallocate network for instance. [ 988.066718] env[68674]: DEBUG nova.network.neutron [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Successfully updated port: 7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.085710] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.016s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.089085] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.035s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.098045] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240521, 'name': Rename_Task, 'duration_secs': 0.767093} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.098481] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.098787] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee048585-20c8-4ced-a997-658b88f239e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.110449] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 988.110449] env[68674]: value = "task-3240525" [ 988.110449] env[68674]: _type = "Task" [ 988.110449] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.111489] env[68674]: INFO nova.scheduler.client.report [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance 6af32e52-f10e-47be-ab36-e130614ba9e8 [ 988.120599] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 988.132591] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240525, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.150167] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.150342] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.150501] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.150668] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.150912] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.150998] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.151688] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.152029] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 988.152501] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.153713] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.153816] env[68674]: DEBUG nova.virt.hardware [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.155201] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74bd6ac-de33-4822-99d1-d1d3d425c122 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.164683] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240520, 'name': ReconfigVM_Task, 'duration_secs': 1.313275} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.165821] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Reconfigured VM instance instance-00000059 to attach disk [datastore1] fa8c58b7-a462-437f-b1ed-57fef6aa3903/fa8c58b7-a462-437f-b1ed-57fef6aa3903.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.166092] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3ade319-748f-49a2-bb32-7a3566396407 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.173093] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208fddb7-84d8-4769-8a18-1fad63e2b764 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.180248] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 988.180248] env[68674]: value = "task-3240526" [ 988.180248] env[68674]: _type = "Task" [ 988.180248] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.202075] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240526, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.285265] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240523, 'name': ReconfigVM_Task, 'duration_secs': 0.595341} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.285714] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647612', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'name': 'volume-268d6a73-58d5-4541-bfb5-15e693956d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '63d6c185-db2c-4ede-a716-9a0dd432ab1f', 'attached_at': '', 'detached_at': '', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'serial': '268d6a73-58d5-4541-bfb5-15e693956d5f'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 988.286405] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3aac6e8d-daaa-45dc-8a4c-cd3c3ce79f62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.296339] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 988.296339] env[68674]: value = "task-3240527" [ 988.296339] env[68674]: _type = "Task" [ 988.296339] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.307746] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240527, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.336551] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.388750] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.388952] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.568581] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "refresh_cache-bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.568951] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquired lock "refresh_cache-bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.568951] env[68674]: DEBUG nova.network.neutron [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.626321] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240525, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.626874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fb8808ce-b3eb-4cfe-be3a-811ccd535358 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "6af32e52-f10e-47be-ab36-e130614ba9e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.225s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.692953] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240526, 'name': Rename_Task, 'duration_secs': 0.169739} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.693423] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.693738] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df3bef5c-a9c5-4860-9f36-c088d0ac69a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.702363] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 988.702363] env[68674]: value = "task-3240528" [ 988.702363] env[68674]: _type = "Task" [ 988.702363] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.712152] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240528, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.808338] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240527, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.892236] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 988.986027] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.986027] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.986237] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.986420] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.986524] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.986669] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.986878] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.987050] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 988.987209] env[68674]: DEBUG nova.virt.hardware 
[None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.987368] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.987580] env[68674]: DEBUG nova.virt.hardware [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.988531] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8554664e-0810-495c-a2c2-9362cfc2dcbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.998657] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91d9da7-138a-4161-8743-d5e333471fc8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.017769] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:44:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6ae43a2-a24c-4187-aba3-c546140142b9', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.026050] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.027809] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.028857] env[68674]: DEBUG nova.compute.manager [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Received event network-vif-plugged-7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 989.029083] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] Acquiring lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.029485] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.029722] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.029980] env[68674]: DEBUG nova.compute.manager [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] No waiting events found dispatching network-vif-plugged-7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 989.030246] env[68674]: WARNING nova.compute.manager [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Received unexpected event network-vif-plugged-7eeedd9b-3b07-43ac-a36b-37a42aa42280 for instance with vm_state building and task_state spawning. [ 989.030438] env[68674]: DEBUG nova.compute.manager [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Received event network-changed-7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 989.030607] env[68674]: DEBUG nova.compute.manager [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Refreshing instance network info cache due to event network-changed-7eeedd9b-3b07-43ac-a36b-37a42aa42280. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 989.030849] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] Acquiring lock "refresh_cache-bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.031111] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ebe870f-4d79-4113-8593-392189d91cf4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.055045] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.055045] env[68674]: value = "task-3240529" [ 989.055045] env[68674]: _type = "Task" [ 989.055045] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.066847] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240529, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.104093] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Applying migration context for instance ffdd1c62-1b4e-40cf-a27e-ff2877439701 as it has an incoming, in-progress migration 8ebdc340-6af1-4ea5-99e1-3cf57304ed57. Migration status is confirming {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 989.106527] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating resource usage from migration 8ebdc340-6af1-4ea5-99e1-3cf57304ed57 [ 989.122658] env[68674]: DEBUG nova.network.neutron [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.134649] env[68674]: DEBUG oslo_vmware.api [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240525, 'name': PowerOnVM_Task, 'duration_secs': 0.605811} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.134975] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.135222] env[68674]: INFO nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Took 12.59 seconds to spawn the instance on the hypervisor. 
[ 989.135404] env[68674]: DEBUG nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.136527] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aff268b-2df4-4fc4-af3f-a2039c484ec0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.142360] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 23891bad-1b63-4237-9243-78954cf67d52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.142712] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f029042f-d80b-453e-adc9-1e65d7da7aaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.142712] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5c12cb5d-821c-4e63-86a0-dadc9794a8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.142712] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.142712] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 7d953e59-53c1-4041-a641-35c12c012f7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.143803] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0e7c5243-ad33-4391-8977-b9019643e3de is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 989.143927] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 2d02adff-9fbf-4889-99e4-4efde5a51b33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.144170] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f70145c9-4846-42e1-9c1c-de9759097abd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e894cd36-95c8-473b-9bbd-483f11fb5add is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 63d6c185-db2c-4ede-a716-9a0dd432ab1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 691f9f14-4f53-46a4-8bf7-d027cfdd37e8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration 8ebdc340-6af1-4ea5-99e1-3cf57304ed57 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance ffdd1c62-1b4e-40cf-a27e-ff2877439701 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 2a7a6269-65a8-402c-b174-a4a46d20a33a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance fa8c58b7-a462-437f-b1ed-57fef6aa3903 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.146669] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 7a13c52a-328a-4baa-827f-4f2e9cd29269 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.216211] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240528, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.282193] env[68674]: DEBUG nova.network.neutron [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Successfully updated port: 90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 989.310138] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240527, 'name': Rename_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.362817] env[68674]: DEBUG nova.network.neutron [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Updating instance_info_cache with network_info: [{"id": "7eeedd9b-3b07-43ac-a36b-37a42aa42280", "address": "fa:16:3e:37:40:ba", "network": {"id": "56986b75-3b31-44ca-946d-e623f79181d9", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-880345714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75837b2623e94e5b898b49a24f9d842e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eeedd9b-3b", "ovs_interfaceid": "7eeedd9b-3b07-43ac-a36b-37a42aa42280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.416012] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.567043] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240529, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.649281] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f6f5fb73-521a-4c83-93ea-a1eb2af2e142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 989.672193] env[68674]: INFO nova.compute.manager [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Took 31.40 seconds to build instance. 
[ 989.681550] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.681789] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.714050] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240528, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.786349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.786349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquired lock "refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.786349] env[68674]: DEBUG nova.network.neutron [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 989.811244] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240527, 'name': Rename_Task, 'duration_secs': 1.217219} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.812037] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.812496] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e1ed7a5-90d7-4cd6-835b-49e4f74e2bd7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.821358] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 989.821358] env[68674]: value = "task-3240530" [ 989.821358] env[68674]: _type = "Task" [ 989.821358] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.830834] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.868366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Releasing lock "refresh_cache-bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.868870] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Instance network_info: |[{"id": "7eeedd9b-3b07-43ac-a36b-37a42aa42280", "address": "fa:16:3e:37:40:ba", "network": {"id": "56986b75-3b31-44ca-946d-e623f79181d9", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-880345714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75837b2623e94e5b898b49a24f9d842e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eeedd9b-3b", "ovs_interfaceid": "7eeedd9b-3b07-43ac-a36b-37a42aa42280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.869275] env[68674]: DEBUG oslo_concurrency.lockutils 
[req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] Acquired lock "refresh_cache-bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.869528] env[68674]: DEBUG nova.network.neutron [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Refreshing network info cache for port 7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.871472] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:40:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7eeedd9b-3b07-43ac-a36b-37a42aa42280', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.879071] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Creating folder: Project (75837b2623e94e5b898b49a24f9d842e). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.882629] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50a3dc63-723c-4541-8fac-99bb614f5f36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.884749] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.885048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.899665] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Created folder: Project (75837b2623e94e5b898b49a24f9d842e) in parent group-v647377. [ 989.899838] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Creating folder: Instances. Parent ref: group-v647639. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.900455] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b623a523-588e-4a3e-b440-33af0b7c41ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.938064] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Created folder: Instances in parent group-v647639. [ 989.938064] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.938064] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.938064] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8a7e0b2-811d-461d-9b19-cb4e7d6eeffe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.941746] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.941746] env[68674]: value = "task-3240533" [ 989.941746] env[68674]: _type = "Task" [ 989.941746] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.951734] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240533, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.062797] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 990.062797] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647637', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'name': 'volume-09b1b6e3-ad70-4884-a142-77859302b0e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f70145c9-4846-42e1-9c1c-de9759097abd', 'attached_at': '', 'detached_at': '', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'serial': '09b1b6e3-ad70-4884-a142-77859302b0e3'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 990.063951] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc9cc43-d6e1-401f-b0b5-b15ea690112e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.073419] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240529, 'name': CreateVM_Task, 'duration_secs': 0.687989} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.084764] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.086399] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.086399] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.086399] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.087151] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1167b3c-1fd4-471a-9aec-2856a34d780c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.090110] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86726dfa-1623-4ccc-8450-39a21d0d0498 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.096156] env[68674]: DEBUG oslo_vmware.api [None 
req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 990.096156] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f5efd-f8b7-dcf7-fcad-261fc41d5909" [ 990.096156] env[68674]: _type = "Task" [ 990.096156] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.119956] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] volume-09b1b6e3-ad70-4884-a142-77859302b0e3/volume-09b1b6e3-ad70-4884-a142-77859302b0e3.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.127041] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6218046d-b476-4530-b5d9-ea702be24be6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.149284] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f5efd-f8b7-dcf7-fcad-261fc41d5909, 'name': SearchDatastore_Task, 'duration_secs': 0.015406} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.150593] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.150900] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.151232] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.151454] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.151693] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 
tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.152091] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 990.152091] env[68674]: value = "task-3240534" [ 990.152091] env[68674]: _type = "Task" [ 990.152091] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.152820] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 990.153134] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 990.153389] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 990.155960] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd0ee727-5de8-4312-b85e-62edcc71bad2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.174373] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2c8ff852-38b5-4363-8bbb-b106916fa471 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.911s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.174684] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240534, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.176818] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.176818] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.177286] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cfb7fa5-e156-4789-9df3-58204b565310 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.184815] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 990.188875] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 990.188875] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d316be-e512-49c4-6d3d-2fbc2bdad139" [ 990.188875] env[68674]: _type = "Task" [ 990.188875] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.207985] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d316be-e512-49c4-6d3d-2fbc2bdad139, 'name': SearchDatastore_Task, 'duration_secs': 0.015481} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.214896] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-823f7986-f26b-4512-9844-a52c2bbe1498 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.223239] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 990.223239] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5220dfea-bd9c-6fc8-ad5f-926e36cb587b" [ 990.223239] env[68674]: _type = "Task" [ 990.223239] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.226920] env[68674]: DEBUG oslo_vmware.api [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240528, 'name': PowerOnVM_Task, 'duration_secs': 1.102664} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.230566] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.230842] env[68674]: INFO nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Took 11.23 seconds to spawn the instance on the hypervisor. [ 990.231079] env[68674]: DEBUG nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 990.231994] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a9f3d1-ab56-4cc8-ac42-4842ea2b90d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.242747] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5220dfea-bd9c-6fc8-ad5f-926e36cb587b, 'name': SearchDatastore_Task, 'duration_secs': 0.016625} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.251347] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.251757] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 990.256341] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2368787b-2d8b-4002-8347-3ac51103c842 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.268756] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 990.268756] env[68674]: value = "task-3240535" [ 990.268756] env[68674]: _type = "Task" [ 990.268756] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.284195] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240535, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.339573] env[68674]: DEBUG nova.network.neutron [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 990.351568] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240530, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.389248] env[68674]: INFO nova.compute.manager [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Detaching volume f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32 [ 990.439573] env[68674]: DEBUG nova.network.neutron [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Updated VIF entry in instance network info cache for port 7eeedd9b-3b07-43ac-a36b-37a42aa42280. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.439573] env[68674]: DEBUG nova.network.neutron [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Updating instance_info_cache with network_info: [{"id": "7eeedd9b-3b07-43ac-a36b-37a42aa42280", "address": "fa:16:3e:37:40:ba", "network": {"id": "56986b75-3b31-44ca-946d-e623f79181d9", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-880345714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75837b2623e94e5b898b49a24f9d842e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eeedd9b-3b", "ovs_interfaceid": "7eeedd9b-3b07-43ac-a36b-37a42aa42280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.441467] env[68674]: INFO nova.virt.block_device [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Attempting to driver detach volume f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32 from mountpoint /dev/sdb [ 990.441706] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 990.441897] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647601', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'name': 'volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7d953e59-53c1-4041-a641-35c12c012f7e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'serial': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 990.442884] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f198b0-aa13-44f7-a341-3f642ea32027 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.461621] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240533, 'name': CreateVM_Task, 'duration_secs': 0.425153} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.478510] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.482861] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.483144] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.483977] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.484266] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56903567-6ea9-4c93-8a46-86916552dce8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.487744] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-478c0421-4930-411e-80e6-e7004b1b8dfa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.499458] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1a4b39-f560-45e0-b5c1-fd108884590e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.502922] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 990.502922] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f78809-8cdb-eec9-f2b5-f5b5059ffecd" [ 990.502922] env[68674]: _type = "Task" [ 990.502922] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.536617] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f52d846-1b09-4e49-8715-565e53641a1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.545586] env[68674]: DEBUG nova.network.neutron [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Updating instance_info_cache with network_info: [{"id": "90c050b9-8e8d-439c-b41f-874750aa76ab", "address": "fa:16:3e:cc:44:d8", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90c050b9-8e", "ovs_interfaceid": "90c050b9-8e8d-439c-b41f-874750aa76ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.547786] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f78809-8cdb-eec9-f2b5-f5b5059ffecd, 'name': SearchDatastore_Task, 'duration_secs': 0.014301} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.548288] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.548575] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.548933] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.549075] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.549262] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.549918] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7093ea84-50ac-4cf8-9711-f76e93803003 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.567123] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] The volume has not been displaced from its original location: [datastore2] volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32/volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 990.572641] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 990.577067] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b62c8948-999a-46c7-9f5e-04d6c45c24dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.594504] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.594504] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.595343] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4343efe-bbf7-4fb6-a4be-44b76b357cf8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.604679] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 990.604679] env[68674]: value = "task-3240536" [ 990.604679] env[68674]: _type = "Task" [ 990.604679] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.604920] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 990.604920] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fb6e03-4f9d-e132-102a-1d9b795855a6" [ 990.604920] env[68674]: _type = "Task" [ 990.604920] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.622838] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.627262] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fb6e03-4f9d-e132-102a-1d9b795855a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.674369] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240534, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.696256] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9141daff-fff6-4633-8702-f4709834e9ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.708091] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2164d5cb-20cb-454b-bf3c-43a77d1a9978 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.748995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.749657] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261b607e-5f33-4bcc-8631-c9080e5ed580 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.763641] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9e2681-fd8f-4f41-b5ed-0a6aa5a221ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.785927] env[68674]: INFO nova.compute.manager [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Took 31.77 seconds to build instance. [ 990.787324] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.795972] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240535, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.834089] env[68674]: DEBUG oslo_vmware.api [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240530, 'name': PowerOnVM_Task, 'duration_secs': 0.649379} completed successfully. 
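Note: the "Task: {...} completed successfully" entries emitted by oslo_vmware.api._poll_task carry a duration_secs field that is useful for spotting slow vCenter operations in this run. A minimal, self-contained sketch for pulling those durations out of raw lines; the sample line is copied (shortened with "...") from this log, and the regex is only an illustration, not part of the log tooling itself:

import re

# Matches completion entries such as:
# "Task: {'id': task-3240536, 'name': ReconfigVM_Task, 'duration_secs': 0.493526} completed successfully."
TASK_DONE = re.compile(
    r"Task: \{'id': (?P<task_id>[^,]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully")

sample = ("[ 991.129321] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-...] "
          "Task: {'id': task-3240536, 'name': ReconfigVM_Task, "
          "'duration_secs': 0.493526} completed successfully.")

m = TASK_DONE.search(sample)
if m:
    # Prints the vCenter task name and how long it took, e.g. "ReconfigVM_Task 0.493526"
    print(m.group('name'), float(m.group('secs')))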
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.834422] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.940706] env[68674]: DEBUG oslo_concurrency.lockutils [req-4e62db83-25ae-4d2f-935f-6356914b18f8 req-839c4ac8-6459-4df7-9c9c-a1619b751e83 service nova] Releasing lock "refresh_cache-bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.965925] env[68674]: DEBUG nova.compute.manager [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 990.966976] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899a61d8-6531-43bb-979b-4aa4698ce68d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.051188] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Releasing lock "refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.051568] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Instance network_info: |[{"id": "90c050b9-8e8d-439c-b41f-874750aa76ab", "address": "fa:16:3e:cc:44:d8", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90c050b9-8e", "ovs_interfaceid": "90c050b9-8e8d-439c-b41f-874750aa76ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 991.052386] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 
7a13c52a-328a-4baa-827f-4f2e9cd29269] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:44:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90c050b9-8e8d-439c-b41f-874750aa76ab', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.061620] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Creating folder: Project (4f68fc55ea344ea6961e31e5d57736a1). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 991.062038] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1037866-d04f-42f5-ba8b-4b561781e308 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.078383] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Created folder: Project (4f68fc55ea344ea6961e31e5d57736a1) in parent group-v647377. [ 991.079646] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Creating folder: Instances. Parent ref: group-v647642. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 991.079957] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-296ed9f0-c982-45c3-b487-ace7a934fe57 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.096023] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Created folder: Instances in parent group-v647642. [ 991.096023] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 991.096023] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 991.096023] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d507f5a-49b2-470e-a16b-64674a170648 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.129321] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240536, 'name': ReconfigVM_Task, 'duration_secs': 0.493526} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.131782] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 991.136140] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fb6e03-4f9d-e132-102a-1d9b795855a6, 'name': SearchDatastore_Task, 'duration_secs': 0.063617} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.136403] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.136403] env[68674]: value = "task-3240539" [ 991.136403] env[68674]: _type = "Task" [ 991.136403] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.136687] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d8ac347-faaf-4c3d-ab36-f150d6268e4c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.148700] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2be03753-0d04-47c7-bc26-29cd93ea478e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.162048] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240539, 'name': CreateVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.167784] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 991.167784] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce982d-8e0b-d89d-fc56-2dd8a27e9ebb" [ 991.167784] env[68674]: _type = "Task" [ 991.167784] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.168517] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 991.168517] env[68674]: value = "task-3240540" [ 991.168517] env[68674]: _type = "Task" [ 991.168517] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.179699] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240534, 'name': ReconfigVM_Task, 'duration_secs': 0.792205} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.181209] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Reconfigured VM instance instance-00000051 to attach disk [datastore1] volume-09b1b6e3-ad70-4884-a142-77859302b0e3/volume-09b1b6e3-ad70-4884-a142-77859302b0e3.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.196483] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35eda6cb-99a5-4021-8142-7415d3670b71 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.208346] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ce982d-8e0b-d89d-fc56-2dd8a27e9ebb, 'name': SearchDatastore_Task, 'duration_secs': 0.024051} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.208663] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240540, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.208970] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.209291] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77/bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.210066] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-162a5b47-33e7-44ee-af0e-a049d052cccf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.216791] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 991.216791] env[68674]: value = "task-3240541" [ 991.216791] env[68674]: _type = "Task" [ 991.216791] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.218381] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 991.218381] env[68674]: value = "task-3240542" [ 991.218381] env[68674]: _type = "Task" [ 991.218381] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.233136] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.236494] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.277185] env[68674]: DEBUG nova.compute.manager [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Received event network-vif-plugged-90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.277185] env[68674]: DEBUG oslo_concurrency.lockutils [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] Acquiring lock "7a13c52a-328a-4baa-827f-4f2e9cd29269-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.277185] env[68674]: DEBUG oslo_concurrency.lockutils [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.277493] env[68674]: DEBUG oslo_concurrency.lockutils [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.277493] env[68674]: DEBUG nova.compute.manager [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] No waiting events found dispatching network-vif-plugged-90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.277704] env[68674]: WARNING nova.compute.manager [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Received unexpected event network-vif-plugged-90c050b9-8e8d-439c-b41f-874750aa76ab for instance with vm_state building and 
task_state spawning. [ 991.279033] env[68674]: DEBUG nova.compute.manager [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Received event network-changed-90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.279033] env[68674]: DEBUG nova.compute.manager [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Refreshing instance network info cache due to event network-changed-90c050b9-8e8d-439c-b41f-874750aa76ab. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 991.279033] env[68674]: DEBUG oslo_concurrency.lockutils [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] Acquiring lock "refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.279033] env[68674]: DEBUG oslo_concurrency.lockutils [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] Acquired lock "refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.279033] env[68674]: DEBUG nova.network.neutron [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Refreshing network info cache for port 90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.291380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0cec1c4f-046c-4323-8a51-3ab76391e83b tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.287s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.291774] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.296809] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595529} completed successfully. 
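Note: the inventory blob that the resource tracker reports for provider ade3f042-7427-494b-9654-0b65e074850c above is the raw data Placement schedules against. As a rough rule (the exact rounding and min_unit/max_unit/step_size handling live in Placement, so treat this as an approximation), effective capacity per resource class is about (total - reserved) * allocation_ratio. Using the numbers from this log:

# Inventory values copied from the log entry for provider ade3f042-7427-494b-9654-0b65e074850c.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Approximate Placement capacity: (total - reserved) * allocation_ratio.
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# VCPU 192, MEMORY_MB 196078, DISK_GB 400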
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.296809] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.296809] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.297683] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f20fb3f9-9f24-4fc6-b703-551a1796c0f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.310384] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 991.310384] env[68674]: value = "task-3240543" [ 991.310384] env[68674]: _type = "Task" [ 991.310384] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.323617] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240543, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.355417] env[68674]: DEBUG nova.compute.manager [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Received event network-changed-4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.355735] env[68674]: DEBUG nova.compute.manager [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Refreshing instance network info cache due to event network-changed-4fae8d88-2aaa-48bd-b0c4-72bc768efce3. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 991.355965] env[68674]: DEBUG oslo_concurrency.lockutils [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.356124] env[68674]: DEBUG oslo_concurrency.lockutils [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.356270] env[68674]: DEBUG nova.network.neutron [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Refreshing network info cache for port 4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.490602] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e895dca2-9498-499b-b1be-e568d4d2e6f0 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 52.744s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.667840] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240539, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.685798] env[68674]: DEBUG oslo_vmware.api [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240540, 'name': ReconfigVM_Task, 'duration_secs': 0.222343} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.687494] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647601', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'name': 'volume-f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7d953e59-53c1-4041-a641-35c12c012f7e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32', 'serial': 'f5c1aae5-59a0-408e-93e1-e7b8fbdd4a32'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 991.744759] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240542, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.745099] env[68674]: DEBUG oslo_vmware.api [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240541, 'name': ReconfigVM_Task, 'duration_secs': 0.219137} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.745466] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647637', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'name': 'volume-09b1b6e3-ad70-4884-a142-77859302b0e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f70145c9-4846-42e1-9c1c-de9759097abd', 'attached_at': '', 'detached_at': '', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'serial': '09b1b6e3-ad70-4884-a142-77859302b0e3'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 991.799150] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 991.799284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.711s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.800780] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.636s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.800780] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.802178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.143s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.804860] env[68674]: INFO nova.compute.claims [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Claim 
successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.838702] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086597} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.839072] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.840015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7cb634-f8ef-4285-bb13-3c885fc84aa8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.850442] env[68674]: INFO nova.scheduler.client.report [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Deleted allocations for instance e894cd36-95c8-473b-9bbd-483f11fb5add [ 991.879938] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.883530] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4e43dcf-289c-4910-971c-aa2c9c332798 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.917117] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 991.917117] env[68674]: value = "task-3240544" [ 991.917117] env[68674]: _type = "Task" [ 991.917117] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.927103] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.163939] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240539, 'name': CreateVM_Task, 'duration_secs': 0.631896} completed successfully. 
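Note: the "Acquiring lock ... / acquired ... :: waited Ns / "released" ... :: held Ns" triplets throughout this section (for example the "compute_resources" lock above, released after being held 3.711s while other requests report multi-second waits) come from oslo.concurrency. A minimal sketch of the two primitives behind those messages, assuming plain in-process locks are enough for illustration; Nova wraps these with its own helpers and lock names, so this is not the exact call site:

from oslo_concurrency import lockutils

# Decorator form: serialises callers on the named lock and produces the
# 'Lock "..." acquired by ... :: waited' / '"released" ... :: held' lines.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # resource-tracker style critical section

# Context-manager form, which produces the 'Acquiring/Acquired/Releasing lock'
# lines seen around the network info cache refreshes.
with lockutils.lock('refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269'):
    pass  # refresh the instance network info cache

claim_resources()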
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.164541] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.165297] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.165474] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.166193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 992.166356] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24443f62-2786-45e7-83fa-b8eb1b65eb6e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.172584] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 992.172584] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52871691-56cd-8a54-861c-e12276bf7faa" [ 992.172584] env[68674]: _type = "Task" [ 992.172584] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.182846] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52871691-56cd-8a54-861c-e12276bf7faa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.201698] env[68674]: DEBUG nova.network.neutron [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Updated VIF entry in instance network info cache for port 90c050b9-8e8d-439c-b41f-874750aa76ab. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.202104] env[68674]: DEBUG nova.network.neutron [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Updating instance_info_cache with network_info: [{"id": "90c050b9-8e8d-439c-b41f-874750aa76ab", "address": "fa:16:3e:cc:44:d8", "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "bbb86a948b114fbc93d96c17f472fc3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90c050b9-8e", "ovs_interfaceid": "90c050b9-8e8d-439c-b41f-874750aa76ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.242995] env[68674]: DEBUG nova.objects.instance [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'flavor' on Instance uuid 7d953e59-53c1-4041-a641-35c12c012f7e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.254108] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666394} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.258418] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77/bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.258737] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.258965] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f9fdc12-8bbf-432a-b7f3-d9e38460ffe1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.270556] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 992.270556] env[68674]: value = "task-3240545" [ 992.270556] env[68674]: _type = "Task" [ 992.270556] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.285484] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.381402] env[68674]: DEBUG nova.network.neutron [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updated VIF entry in instance network info cache for port 4fae8d88-2aaa-48bd-b0c4-72bc768efce3. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.381874] env[68674]: DEBUG nova.network.neutron [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.406940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04a9f848-5acb-4f79-8a83-4774f7bdec07 tempest-ServerTagsTestJSON-480075311 tempest-ServerTagsTestJSON-480075311-project-member] Lock "e894cd36-95c8-473b-9bbd-483f11fb5add" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.717s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.427769] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.685242] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52871691-56cd-8a54-861c-e12276bf7faa, 'name': SearchDatastore_Task, 'duration_secs': 0.030708} completed successfully. 
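Note: the instance_info_cache blobs above are plain JSON once detached from the surrounding log text, so the fields that matter when tracing a port (id, MAC, fixed IP, segmentation id) can be read out directly. A small sketch using a trimmed copy of the VIF entry for port 90c050b9-8e8d-439c-b41f-874750aa76ab; nested keys not needed here are omitted, so this is not the full cache record:

import json

# Trimmed from the "Updating instance_info_cache with network_info: [...]" entry above.
network_info = json.loads('''
[{"id": "90c050b9-8e8d-439c-b41f-874750aa76ab",
  "address": "fa:16:3e:cc:44:d8",
  "network": {"id": "dd08b721-4e1e-4a63-b8f2-7a4995f58edd",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.233.0/24",
                           "ips": [{"address": "192.168.233.55", "type": "fixed"}]}]},
  "details": {"segmentation_id": 395},
  "devname": "tap90c050b9-8e"}]
''')

for vif in network_info:
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    # e.g. 90c050b9-... fa:16:3e:cc:44:d8 ['192.168.233.55'] 395
    print(vif["id"], vif["address"], ips, vif["details"]["segmentation_id"])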
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.685660] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.685966] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 992.686379] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.686582] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.686803] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.687484] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-891c037c-f476-460d-9601-d0e73017ade6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.699025] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.699362] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 992.700135] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76b1c30b-bbd9-4251-9715-9e2dd8b63bae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.704728] env[68674]: DEBUG oslo_concurrency.lockutils [req-a04e4778-0e67-415b-99a6-15ad3cc1f043 req-c8741d84-7e29-4b29-b531-4807ce85b0a3 service nova] Releasing lock "refresh_cache-7a13c52a-328a-4baa-827f-4f2e9cd29269" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.707715] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 992.707715] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52126db4-1548-a165-e71e-7c4c29ae8dde" [ 992.707715] env[68674]: _type = "Task" [ 992.707715] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.716851] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52126db4-1548-a165-e71e-7c4c29ae8dde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.781808] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076235} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.781983] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 992.782749] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000cb10d-42e3-4466-9699-7655571444a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.805662] env[68674]: DEBUG nova.objects.instance [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'flavor' on Instance uuid f70145c9-4846-42e1-9c1c-de9759097abd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.817276] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77/bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.820970] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3455ba4d-3d44-4104-ab90-6a818534f29f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.855197] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 992.855197] env[68674]: value = "task-3240546" [ 992.855197] env[68674]: _type = "Task" [ 992.855197] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.867290] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240546, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.886649] env[68674]: DEBUG oslo_concurrency.lockutils [req-c8883d19-b903-40b6-aab6-76476541054a req-339fa0be-1b9e-4e54-a02d-50cb07628817 service nova] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.929837] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240544, 'name': ReconfigVM_Task, 'duration_secs': 0.739845} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.933115] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33/2d02adff-9fbf-4889-99e4-4efde5a51b33.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.934555] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'size': 0, 'encryption_format': None, 'device_name': '/dev/sda', 'encrypted': False, 'boot_index': 0, 'disk_bus': None, 'encryption_options': None, 'guest_format': None, 'encryption_secret_uuid': None, 'image_id': 'b84d9354-ef6b-46ca-9dae-6549fa89bbea'}], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'attachment_id': 'dc3d0db4-641f-4701-8e75-71558aa36cce', 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'}, 'boot_index': None, 'disk_bus': None, 'guest_format': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=68674) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 992.934777] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 992.934970] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 992.936575] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9c21f4-a07e-45fe-b690-427f33ac442e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.966977] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a40bc0-f3d6-42b2-9c21-0c180750fe39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.997087] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] volume-2110fa31-61e9-4ce5-a495-f1f566fee58d/volume-2110fa31-61e9-4ce5-a495-f1f566fee58d.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.000055] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-107204c4-ea24-4147-9421-254cdb7b9ed1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.023635] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 993.023635] env[68674]: value = "task-3240547" [ 993.023635] env[68674]: _type = "Task" [ 993.023635] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.038577] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240547, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.219433] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52126db4-1548-a165-e71e-7c4c29ae8dde, 'name': SearchDatastore_Task, 'duration_secs': 0.019871} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.223886] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87eb3e67-8a09-4b45-afcf-add532fbd6c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.230671] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 993.230671] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520d41e0-c87e-ffad-f179-3c63654d210b" [ 993.230671] env[68674]: _type = "Task" [ 993.230671] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.240081] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520d41e0-c87e-ffad-f179-3c63654d210b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.242020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a050e203-b031-4911-b81a-0e5c5b37585d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.250317] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf79ef8-16a1-46e1-b72a-5eb72224d739 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.287566] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5b71d827-149a-4384-bf35-53aeab5d796a tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.403s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.289565] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39239e7-d7a6-42e0-845d-5f5040ba34c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.303266] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01b96c5-6066-4172-b1c9-79d65872a7fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.317789] env[68674]: DEBUG nova.compute.provider_tree [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.329118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-431fdfd1-bc0b-4766-8545-c92548b8ec1a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" 
"released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.500s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.367238] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240546, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.530714] env[68674]: DEBUG nova.compute.manager [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Received event network-changed-e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.532055] env[68674]: DEBUG nova.compute.manager [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Refreshing instance network info cache due to event network-changed-e6a3416c-8601-4d3f-8b5b-74d43a100d6c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 993.532055] env[68674]: DEBUG oslo_concurrency.lockutils [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] Acquiring lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.532055] env[68674]: DEBUG oslo_concurrency.lockutils [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] Acquired lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.532055] env[68674]: DEBUG nova.network.neutron [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Refreshing network info cache for port e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.540813] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240547, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.742667] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520d41e0-c87e-ffad-f179-3c63654d210b, 'name': SearchDatastore_Task, 'duration_secs': 0.026602} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.742995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.743308] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 7a13c52a-328a-4baa-827f-4f2e9cd29269/7a13c52a-328a-4baa-827f-4f2e9cd29269.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 993.743541] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-618efe49-e8ea-4a07-b614-a3edc23c9ed5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.756443] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 993.756443] env[68674]: value = "task-3240548" [ 993.756443] env[68674]: _type = "Task" [ 993.756443] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.769908] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240548, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.829405] env[68674]: DEBUG nova.scheduler.client.report [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.871516] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240546, 'name': ReconfigVM_Task, 'duration_secs': 0.603113} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.871838] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Reconfigured VM instance instance-0000005a to attach disk [datastore2] bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77/bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 993.872685] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b08b9b5a-1eda-452f-afdb-b123ec7045b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.882049] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 993.882049] env[68674]: value = "task-3240549" [ 993.882049] env[68674]: _type = "Task" [ 993.882049] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.894885] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240549, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.040771] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240547, 'name': ReconfigVM_Task, 'duration_secs': 0.561403} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.041456] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfigured VM instance instance-0000004d to attach disk [datastore1] volume-2110fa31-61e9-4ce5-a495-f1f566fee58d/volume-2110fa31-61e9-4ce5-a495-f1f566fee58d.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.047553] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c9d0d44-4e58-4fa6-8538-3b4ad9bdea85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.069539] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 994.069539] env[68674]: value = "task-3240550" [ 994.069539] env[68674]: _type = "Task" [ 994.069539] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.085711] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240550, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.270868] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240548, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.338749] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.339358] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 994.347428] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.835s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.347682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.350547] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.445s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.379908] env[68674]: INFO nova.scheduler.client.report [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Deleted allocations for instance 691f9f14-4f53-46a4-8bf7-d027cfdd37e8 [ 994.402971] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240549, 'name': Rename_Task, 'duration_secs': 0.22835} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.403672] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.404104] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-900486f4-673d-4741-a480-78533925ab94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.417341] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 994.417341] env[68674]: value = "task-3240551" [ 994.417341] env[68674]: _type = "Task" [ 994.417341] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.433071] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.448733] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.448733] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.448861] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "7d953e59-53c1-4041-a641-35c12c012f7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.448944] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.449134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd 
tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.451907] env[68674]: INFO nova.compute.manager [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Terminating instance [ 994.490149] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "cab97ca7-968b-4d40-bb1f-2244469e1b56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.490340] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "cab97ca7-968b-4d40-bb1f-2244469e1b56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.495596] env[68674]: DEBUG nova.network.neutron [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Updated VIF entry in instance network info cache for port e6a3416c-8601-4d3f-8b5b-74d43a100d6c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.495596] env[68674]: DEBUG nova.network.neutron [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Updating instance_info_cache with network_info: [{"id": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "address": "fa:16:3e:28:43:ab", "network": {"id": "4afa0a78-13f3-4d61-91ab-0ff686045241", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1736462154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d376e63760b4b708305a7b0aafd98a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a3416c-86", "ovs_interfaceid": "e6a3416c-8601-4d3f-8b5b-74d43a100d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.582408] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240550, 'name': ReconfigVM_Task, 'duration_secs': 0.335008} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.582833] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 994.583567] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff286bc6-e754-44a7-b8fe-11357b57c28c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.595137] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 994.595137] env[68674]: value = "task-3240552" [ 994.595137] env[68674]: _type = "Task" [ 994.595137] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.610435] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240552, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.767572] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240548, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723174} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.767918] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 7a13c52a-328a-4baa-827f-4f2e9cd29269/7a13c52a-328a-4baa-827f-4f2e9cd29269.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 994.768079] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.768636] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83607b55-1aaa-4fb1-bd19-2bdc1eb30a7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.776976] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 994.776976] env[68674]: value = "task-3240553" [ 994.776976] env[68674]: _type = "Task" [ 994.776976] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.788179] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240553, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.858659] env[68674]: DEBUG nova.compute.utils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 994.861445] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 994.861445] env[68674]: DEBUG nova.network.neutron [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.892644] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b0d1aca4-17f6-4b60-8f8b-c47b90ab7730 tempest-DeleteServersAdminTestJSON-618650618 tempest-DeleteServersAdminTestJSON-618650618-project-member] Lock "691f9f14-4f53-46a4-8bf7-d027cfdd37e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.446s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.930575] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240551, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.933244] env[68674]: DEBUG nova.policy [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0388229a8ee64f25b0c31cdc4a8b439e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f5ed5655bc84a88bf9b4b42b4a3b1e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 994.960777] env[68674]: DEBUG nova.compute.manager [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.961110] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.961969] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66850058-61af-4d92-9973-5a5e6e75c853 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.973008] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.976518] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e6b8e9e-6124-4c73-bcb3-3dc35d3abe9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.985566] env[68674]: DEBUG oslo_vmware.api [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 994.985566] env[68674]: value = "task-3240554" [ 994.985566] env[68674]: _type = "Task" [ 994.985566] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.993071] env[68674]: DEBUG nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 995.000452] env[68674]: DEBUG oslo_concurrency.lockutils [req-6d728409-14b9-4d2b-9bf4-91c02702b523 req-e52006bc-dad3-42de-a8fc-c41b7c05fd08 service nova] Releasing lock "refresh_cache-fa8c58b7-a462-437f-b1ed-57fef6aa3903" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.001147] env[68674]: DEBUG oslo_vmware.api [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240554, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.114042] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240552, 'name': Rename_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.176752] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "182deaf0-c20a-4041-8f41-81786d6b053e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.176968] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "182deaf0-c20a-4041-8f41-81786d6b053e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.229612] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb00122a-6f34-4989-ad75-b80334c47387 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.238375] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f9b8a3-ed40-4e85-8939-94df738cfd04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.273634] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724707b2-ecc5-4ea0-bb46-03603eb31d50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.285224] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce45f0b4-c5ff-4a85-8edb-4dbc360d0fa3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.293028] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069891} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.293846] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.294718] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff5588e-559b-4a23-905e-b68121b45b3d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.306484] env[68674]: DEBUG nova.compute.provider_tree [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.331930] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 7a13c52a-328a-4baa-827f-4f2e9cd29269/7a13c52a-328a-4baa-827f-4f2e9cd29269.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.333124] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a986ffbc-c8f3-4bb6-af33-3f28f0f8cde1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.356386] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 995.356386] env[68674]: value = "task-3240555" [ 995.356386] env[68674]: _type = "Task" [ 995.356386] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.363675] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 995.371613] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240555, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.379797] env[68674]: DEBUG nova.network.neutron [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Successfully created port: a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.430570] env[68674]: DEBUG oslo_vmware.api [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240551, 'name': PowerOnVM_Task, 'duration_secs': 0.655041} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.430875] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.431178] env[68674]: INFO nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Took 8.92 seconds to spawn the instance on the hypervisor. [ 995.431309] env[68674]: DEBUG nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.432206] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2821ff-1797-4585-bbde-28b98b3c65d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.497335] env[68674]: DEBUG oslo_vmware.api [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240554, 'name': PowerOffVM_Task, 'duration_secs': 0.501392} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.497624] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.497785] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.498047] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38230afc-616a-4748-ba33-ce83a4c501aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.525033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.590025] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.590363] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.590629] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleting the datastore file [datastore1] 7d953e59-53c1-4041-a641-35c12c012f7e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.591050] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00e18cbf-40d4-4c80-a384-e892bb60d8c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.602400] env[68674]: DEBUG oslo_vmware.api [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 995.602400] env[68674]: value = "task-3240557" [ 995.602400] env[68674]: _type = "Task" [ 995.602400] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.609344] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240552, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.616661] env[68674]: DEBUG oslo_vmware.api [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240557, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.683674] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 995.809415] env[68674]: DEBUG nova.scheduler.client.report [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.871281] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240555, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.957821] env[68674]: INFO nova.compute.manager [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Took 26.77 seconds to build instance. [ 996.110432] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240552, 'name': Rename_Task, 'duration_secs': 1.038695} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.111166] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.111432] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec5ec7a5-226e-4671-bf94-0ae9fc0760ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.116543] env[68674]: DEBUG oslo_vmware.api [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.403007} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.117851] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.118065] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.118252] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.118430] env[68674]: INFO nova.compute.manager [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 996.118755] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.118915] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 996.118915] env[68674]: value = "task-3240558" [ 996.118915] env[68674]: _type = "Task" [ 996.118915] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.119124] env[68674]: DEBUG nova.compute.manager [-] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.119210] env[68674]: DEBUG nova.network.neutron [-] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.129495] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240558, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.216243] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.371864] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240555, 'name': ReconfigVM_Task, 'duration_secs': 0.663428} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.373236] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 996.375625] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 7a13c52a-328a-4baa-827f-4f2e9cd29269/7a13c52a-328a-4baa-827f-4f2e9cd29269.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.376524] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54a33f72-397c-4ac8-a64b-bd6ab9a87cc4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.385972] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 996.385972] env[68674]: value = "task-3240559" [ 996.385972] env[68674]: _type = "Task" [ 996.385972] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.396628] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240559, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.409032] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 996.409402] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.409552] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 996.409757] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.409927] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 996.410091] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 996.410309] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 996.410516] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 996.410707] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 996.410875] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 996.411073] env[68674]: DEBUG nova.virt.hardware [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 996.412111] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21945983-776d-491d-9bc9-d038056087c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.423668] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f6a6f3-172c-4d71-b87e-b7d55ba6b3af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.463831] env[68674]: DEBUG oslo_concurrency.lockutils [None req-efa72c17-680e-4847-873d-378760f38e99 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.288s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.634670] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240558, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.824256] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.474s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.827642] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.491s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.827863] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.830899] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.414s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.832748] env[68674]: INFO nova.compute.claims [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.864860] env[68674]: INFO nova.scheduler.client.report [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted allocations for instance 0e7c5243-ad33-4391-8977-b9019643e3de [ 996.905124] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240559, 'name': Rename_Task, 'duration_secs': 0.261766} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.905493] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.905822] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4aa73377-9733-455e-be6f-7ae4b21fa61a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.923029] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 996.923029] env[68674]: value = "task-3240560" [ 996.923029] env[68674]: _type = "Task" [ 996.923029] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.935967] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.121654] env[68674]: DEBUG nova.compute.manager [req-073f983a-1298-4b10-a220-5cd4e39b6d54 req-d58c390d-2779-4747-8423-be8088eaf2c0 service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Received event network-vif-deleted-856fc34c-4049-4185-9ab1-8f86e2cfdeff {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.121835] env[68674]: INFO nova.compute.manager [req-073f983a-1298-4b10-a220-5cd4e39b6d54 req-d58c390d-2779-4747-8423-be8088eaf2c0 service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Neutron deleted interface 856fc34c-4049-4185-9ab1-8f86e2cfdeff; detaching it from the instance and deleting it from the info cache [ 997.122014] env[68674]: DEBUG nova.network.neutron [req-073f983a-1298-4b10-a220-5cd4e39b6d54 req-d58c390d-2779-4747-8423-be8088eaf2c0 service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.134899] env[68674]: DEBUG oslo_vmware.api [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240558, 'name': PowerOnVM_Task, 'duration_secs': 0.848599} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.135632] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.135898] env[68674]: DEBUG nova.compute.manager [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.136896] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c79bb1-ead2-4f35-8f6b-abf1df2d0943 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.179071] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.179346] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.179555] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.179738] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.179917] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.182713] env[68674]: INFO nova.compute.manager [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] 
[instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Terminating instance [ 997.381116] env[68674]: DEBUG nova.network.neutron [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Successfully updated port: a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.387765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b40cd1b7-3f28-45aa-9d60-843bfbc5fe30 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "0e7c5243-ad33-4391-8977-b9019643e3de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.808s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.430184] env[68674]: INFO nova.scheduler.client.report [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocation for migration 8ebdc340-6af1-4ea5-99e1-3cf57304ed57 [ 997.434694] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240560, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.520181] env[68674]: DEBUG nova.network.neutron [-] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.625812] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5f073c6-bcab-4dc0-a5ea-650eb7d6a6c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.639311] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0fabc-a26a-4ecc-bad8-f3f818d61056 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.661766] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.678457] env[68674]: DEBUG nova.compute.manager [req-073f983a-1298-4b10-a220-5cd4e39b6d54 req-d58c390d-2779-4747-8423-be8088eaf2c0 service nova] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Detach interface failed, port_id=856fc34c-4049-4185-9ab1-8f86e2cfdeff, reason: Instance 7d953e59-53c1-4041-a641-35c12c012f7e could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 997.689811] env[68674]: DEBUG nova.compute.manager [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 997.689811] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.690861] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82a7947-fde0-4143-8f08-439ed92ce445 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.700363] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 997.700674] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4eb49992-7989-4f0d-83e8-fe7c829d0f0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.709344] env[68674]: DEBUG oslo_vmware.api [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 997.709344] env[68674]: value = "task-3240561" [ 997.709344] env[68674]: _type = "Task" [ 997.709344] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.720125] env[68674]: DEBUG oslo_vmware.api [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240561, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.886501] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.886765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquired lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.887300] env[68674]: DEBUG nova.network.neutron [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.933075] env[68674]: DEBUG oslo_vmware.api [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240560, 'name': PowerOnVM_Task, 'duration_secs': 0.600578} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.933493] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.933720] env[68674]: INFO nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Took 9.81 seconds to spawn the instance on the hypervisor. 
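The PowerOnVM_Task, Rename_Task and DeleteDatastoreFile_Task records above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vCenter task through the API session, then polls it (wait_for_task at api.py:397, _poll_task at api.py:434) until vCenter reports success, logging "progress is N%" along the way. A minimal sketch of that pattern using the public oslo.vmware session API follows; the hostname, credentials and the power_on helper are illustrative placeholders, not values or code taken from this deployment.

    # Sketch of the invoke-then-poll task pattern traced in the records above.
    # Endpoint, credentials and the helper name are placeholders for illustration.
    from oslo_vmware import api

    def power_on(session, vm_ref):
        """Start the VM and block until vCenter reports the task as successful."""
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task roughly every task_poll_interval seconds
        # and logs its progress, which is what produces the "progress is N%" lines.
        session.wait_for_task(task)

    session = api.VMwareAPISession(
        'vc.example.invalid', 'user', 'password',   # placeholder vCenter endpoint/credentials
        api_retry_count=10, task_poll_interval=0.5)
    # vm_ref would be a VirtualMachine managed-object reference obtained through the
    # property-collector lookups (RetrievePropertiesEx) seen throughout this log.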
[ 997.933907] env[68674]: DEBUG nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.939160] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb3e30c-2a6f-4136-80f6-0e3a31321b27 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.943700] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a81d5820-6d82-4dc3-af70-335d195abcec tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 17.075s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.023050] env[68674]: INFO nova.compute.manager [-] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Took 1.90 seconds to deallocate network for instance. [ 998.064705] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52233f04-91fe-1bb3-2b6e-259ece069ec1/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 998.066327] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bea2edf-98b3-4901-a4f3-48c08268c22a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.079392] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52233f04-91fe-1bb3-2b6e-259ece069ec1/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 998.079392] env[68674]: ERROR oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52233f04-91fe-1bb3-2b6e-259ece069ec1/disk-0.vmdk due to incomplete transfer. [ 998.079538] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-da93e64b-8ecb-413d-a495-195725af0ab1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.091017] env[68674]: DEBUG oslo_vmware.rw_handles [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52233f04-91fe-1bb3-2b6e-259ece069ec1/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 998.091227] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Uploaded image e1a081da-3f4c-4a57-ab24-e5928d86e493 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 998.094067] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 998.094367] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f1877961-0d3c-4b2c-b042-8626d8255601 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.110132] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 998.110132] env[68674]: value = "task-3240562" [ 998.110132] env[68674]: _type = "Task" [ 998.110132] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.130045] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240562, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.210067] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e98c97-ef08-4acf-a504-60fe7cd4c9e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.225153] env[68674]: DEBUG oslo_vmware.api [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240561, 'name': PowerOffVM_Task, 'duration_secs': 0.292871} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.226866] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.227151] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.227511] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89e6a6a6-0220-440f-bfcc-de143a0fc974 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.229820] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f17ace-4778-4d12-8fed-a8846872d65c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.270168] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61b4ca3-e752-4dac-888d-20155287872d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.281581] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf03a7ec-55a7-46cb-af91-23e3b453fe80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.297570] env[68674]: DEBUG nova.compute.provider_tree [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.319518] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.320053] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.320348] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Deleting the datastore file [datastore2] bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.321185] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c11cd78-2ad9-45aa-b4c3-3812692f8690 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.332895] env[68674]: DEBUG oslo_vmware.api [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for the task: (returnval){ [ 998.332895] env[68674]: value = "task-3240564" [ 998.332895] env[68674]: _type = "Task" [ 998.332895] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.342477] env[68674]: DEBUG oslo_vmware.api [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240564, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.456475] env[68674]: DEBUG nova.network.neutron [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.466606] env[68674]: INFO nova.compute.manager [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Took 28.48 seconds to build instance. [ 998.530086] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.622197] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240562, 'name': Destroy_Task, 'duration_secs': 0.44891} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.622444] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Destroyed the VM [ 998.624198] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 998.624198] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ab794e3f-0b18-4752-b384-e4c0b5b1f92a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.634748] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 998.634748] env[68674]: value = "task-3240565" [ 998.634748] env[68674]: _type = "Task" [ 998.634748] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.649038] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240565, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.802157] env[68674]: DEBUG nova.scheduler.client.report [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.841682] env[68674]: DEBUG nova.network.neutron [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Updating instance_info_cache with network_info: [{"id": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "address": "fa:16:3e:cc:b9:14", "network": {"id": "81ca1306-cf11-4d45-9bb8-26dea315dda8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-558830138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f5ed5655bc84a88bf9b4b42b4a3b1e4", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa90f2990-e0", "ovs_interfaceid": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.854446] env[68674]: DEBUG oslo_vmware.api [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Task: {'id': task-3240564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277379} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.854555] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.855248] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.855393] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.856604] env[68674]: INFO nova.compute.manager [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Took 1.17 seconds to destroy the instance on the hypervisor. [ 998.856604] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.857023] env[68674]: DEBUG nova.compute.manager [-] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 998.857146] env[68674]: DEBUG nova.network.neutron [-] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.969263] env[68674]: DEBUG oslo_concurrency.lockutils [None req-eeab99c4-64ce-4695-b32a-02be827fa4d2 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.999s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.151814] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240565, 'name': RemoveSnapshot_Task} progress is 15%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.265161] env[68674]: DEBUG nova.compute.manager [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Received event network-vif-plugged-a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.265391] env[68674]: DEBUG oslo_concurrency.lockutils [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] Acquiring lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.265604] env[68674]: DEBUG oslo_concurrency.lockutils [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.265917] env[68674]: DEBUG oslo_concurrency.lockutils [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.265985] env[68674]: DEBUG nova.compute.manager [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] No waiting events found dispatching network-vif-plugged-a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 999.266429] env[68674]: WARNING nova.compute.manager [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: 
f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Received unexpected event network-vif-plugged-a90f2990-e03b-4fce-b01d-d1fe57be7c57 for instance with vm_state building and task_state spawning. [ 999.266726] env[68674]: DEBUG nova.compute.manager [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Received event network-changed-a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.266946] env[68674]: DEBUG nova.compute.manager [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Refreshing instance network info cache due to event network-changed-a90f2990-e03b-4fce-b01d-d1fe57be7c57. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 999.267335] env[68674]: DEBUG oslo_concurrency.lockutils [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] Acquiring lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.310718] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.311270] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 999.316531] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.568s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.317976] env[68674]: INFO nova.compute.claims [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.320640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "7a13c52a-328a-4baa-827f-4f2e9cd29269" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.320859] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.321065] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "7a13c52a-328a-4baa-827f-4f2e9cd29269-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.321254] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.321419] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.326148] env[68674]: INFO nova.compute.manager [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Terminating instance [ 999.346182] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 
tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Releasing lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.346182] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Instance network_info: |[{"id": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "address": "fa:16:3e:cc:b9:14", "network": {"id": "81ca1306-cf11-4d45-9bb8-26dea315dda8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-558830138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f5ed5655bc84a88bf9b4b42b4a3b1e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa90f2990-e0", "ovs_interfaceid": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.346513] env[68674]: DEBUG oslo_concurrency.lockutils [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] Acquired lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.346730] env[68674]: DEBUG nova.network.neutron [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Refreshing network info cache for port a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.348812] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:b9:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a90f2990-e03b-4fce-b01d-d1fe57be7c57', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.358556] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Creating folder: Project (5f5ed5655bc84a88bf9b4b42b4a3b1e4). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.359586] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfffef05-2bb2-4769-8d30-0a54446ad4fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.376133] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Created folder: Project (5f5ed5655bc84a88bf9b4b42b4a3b1e4) in parent group-v647377. [ 999.376353] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Creating folder: Instances. Parent ref: group-v647645. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.376605] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-195e3da2-0376-4b58-b89b-a8fdeda2c92a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.389930] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Created folder: Instances in parent group-v647645. [ 999.390218] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.390418] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.390634] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe0efc7d-3b1c-45ca-87d8-17d78bc8035a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.414873] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.414873] env[68674]: value = "task-3240568" [ 999.414873] env[68674]: _type = "Task" [ 999.414873] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.424129] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240568, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.648990] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240565, 'name': RemoveSnapshot_Task} progress is 84%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.662981] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.663602] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.663665] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.664128] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.664128] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.668273] env[68674]: INFO nova.compute.manager [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Terminating instance [ 999.818691] env[68674]: DEBUG nova.compute.utils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 999.820605] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Allocating IP information in the background. 
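The repeated "Acquiring lock ... / Lock ... acquired by ... / released" records above come from oslo.concurrency's lockutils, which Nova uses to serialize the resource claim and the per-instance terminate path. A minimal sketch of that named-lock pattern is below; it is illustrative only, not Nova's actual code, and the function names are invented:

```python
# Minimal sketch of the named-lock pattern behind the
# "Acquiring lock ... / acquired by ... / released" DEBUG records above.
# Illustrative only -- not Nova's code; function names are invented.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources_sketch(instance_uuid):
    # Runs only while the 'compute_resources' lock is held; lockutils emits
    # the acquire/release DEBUG lines of the kind seen in this log.
    return f"claimed resources for {instance_uuid}"

def terminate_instance_sketch(instance_uuid):
    # Per-instance lock plus a short-lived "<uuid>-events" lock, mirroring
    # the do_terminate_instance / _clear_events sequence logged above.
    with lockutils.lock(instance_uuid):
        with lockutils.lock(f"{instance_uuid}-events"):
            pass  # clear pending instance events
        # ... power off, unregister, delete datastore files ...
```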
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 999.820605] env[68674]: DEBUG nova.network.neutron [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.831846] env[68674]: DEBUG nova.compute.manager [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 999.832117] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.832871] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59902e86-8afa-498d-998e-80b58d6de631 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.846169] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.846324] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ea12693-0f0d-4625-be1b-5eacecc2a54e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.855243] env[68674]: DEBUG oslo_vmware.api [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 999.855243] env[68674]: value = "task-3240569" [ 999.855243] env[68674]: _type = "Task" [ 999.855243] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.870876] env[68674]: DEBUG oslo_vmware.api [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240569, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.895871] env[68674]: DEBUG nova.policy [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34b9ef7eae4a4cceba2fa699ce38ac0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f2a133c72064227bd419d63d5d9557f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 999.932692] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240568, 'name': CreateVM_Task, 'duration_secs': 0.415887} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.933111] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.934042] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.934553] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.934968] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 999.935117] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-883e27d5-c116-40d4-aa38-c4d0ee36bc5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.946582] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 999.946582] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a51ac7-fd41-d589-7d0b-053c4d1ba7e8" [ 999.946582] env[68674]: _type = "Task" [ 999.946582] env[68674]: } to complete. 
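The "Waiting for the task ... to complete", "progress is N%" and "completed successfully" records are produced by oslo.vmware's wait_for_task/_poll_task loop (the api.py paths in the log point at it). The snippet below is a simplified stand-in for that polling loop, not oslo.vmware's implementation; get_task_info() is an assumed callable:

```python
# Simplified stand-in for the polling loop that produces the
# "Task: {...} progress is N%" / "completed successfully" records above.
# Not oslo.vmware's implementation; get_task_info() is an assumed helper
# returning e.g. {'state': 'running', 'progress': 84}.
import time

def wait_for_task_sketch(get_task_info, poll_interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print(f"progress is {info.get('progress', 0)}%")  # mirrors the DEBUG lines
        time.sleep(poll_interval)
    raise TimeoutError('task did not complete in time')
```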
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.957288] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a51ac7-fd41-d589-7d0b-053c4d1ba7e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.987023] env[68674]: DEBUG nova.network.neutron [-] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.149186] env[68674]: DEBUG oslo_vmware.api [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240565, 'name': RemoveSnapshot_Task, 'duration_secs': 1.144618} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.149535] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1000.149798] env[68674]: INFO nova.compute.manager [None req-6ad91315-00ba-4059-bea0-cbd411ea41b1 tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Took 17.19 seconds to snapshot the instance on the hypervisor. [ 1000.173911] env[68674]: DEBUG nova.compute.manager [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.174331] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.175512] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329161db-c93a-4be1-9132-f184fd8b08ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.188798] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.188798] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e37ae2fb-098e-4817-bd49-611babf8f919 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.199501] env[68674]: DEBUG oslo_vmware.api [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1000.199501] env[68674]: value = "task-3240570" [ 1000.199501] env[68674]: _type = "Task" [ 1000.199501] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.212878] env[68674]: DEBUG oslo_vmware.api [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.322450] env[68674]: DEBUG nova.network.neutron [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Updated VIF entry in instance network info cache for port a90f2990-e03b-4fce-b01d-d1fe57be7c57. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.322830] env[68674]: DEBUG nova.network.neutron [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Updating instance_info_cache with network_info: [{"id": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "address": "fa:16:3e:cc:b9:14", "network": {"id": "81ca1306-cf11-4d45-9bb8-26dea315dda8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-558830138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f5ed5655bc84a88bf9b4b42b4a3b1e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa90f2990-e0", "ovs_interfaceid": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.327703] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1000.374065] env[68674]: DEBUG oslo_vmware.api [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240569, 'name': PowerOffVM_Task, 'duration_secs': 0.236989} completed successfully. 
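The network_info blob cached above is a list of VIF dictionaries. A trimmed-down copy of the entry for port a90f2990-e03b-4fce-b01d-d1fe57be7c57 (values taken from the record above) shows where the fields of interest live; plain dicts are used here, not Nova's NetworkInfo model classes:

```python
# Trimmed copy of the cached VIF entry above, and how the interesting
# fields are reached. Structure only -- not Nova's NetworkInfo model.
vif = {
    "id": "a90f2990-e03b-4fce-b01d-d1fe57be7c57",
    "address": "fa:16:3e:cc:b9:14",
    "network": {
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.9", "type": "fixed"}]}],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "devname": "tapa90f2990-e0",
}

mac = vif["address"]
fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]
mtu = vif["network"]["meta"]["mtu"]
print(mac, fixed_ip, mtu, vif["devname"])
# fa:16:3e:cc:b9:14 192.168.128.9 8950 tapa90f2990-e0
```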
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.374065] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.374065] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.374065] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-081ea623-ede5-4526-8f7d-aff40099d295 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.455048] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.455048] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.455048] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Deleting the datastore file [datastore2] 7a13c52a-328a-4baa-827f-4f2e9cd29269 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.455048] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e030253-b96a-4d5a-8c9a-ec1fcf48e0fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.461034] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a51ac7-fd41-d589-7d0b-053c4d1ba7e8, 'name': SearchDatastore_Task, 'duration_secs': 0.012226} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.461671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.462283] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.462526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.462671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.462846] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.463128] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-faa71577-06c0-4d1a-bbd4-bcfb691a2c00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.466587] env[68674]: DEBUG oslo_vmware.api [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for the task: (returnval){ [ 1000.466587] env[68674]: value = "task-3240572" [ 1000.466587] env[68674]: _type = "Task" [ 1000.466587] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.475849] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.475849] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.476701] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e23acda-8e09-44b2-bf62-fd313650f5cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.482611] env[68674]: DEBUG oslo_vmware.api [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.487520] env[68674]: INFO nova.compute.manager [-] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Took 1.63 seconds to deallocate network for instance. [ 1000.487872] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1000.487872] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5226ad4e-6531-3469-db75-b91e727e8cae" [ 1000.487872] env[68674]: _type = "Task" [ 1000.487872] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.506873] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5226ad4e-6531-3469-db75-b91e727e8cae, 'name': SearchDatastore_Task, 'duration_secs': 0.014741} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.507710] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23a34993-5375-4bf2-a251-a632ac3d5c5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.514032] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1000.514032] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52882b4b-e675-79ea-3e9d-68f48cf72d03" [ 1000.514032] env[68674]: _type = "Task" [ 1000.514032] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.530475] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52882b4b-e675-79ea-3e9d-68f48cf72d03, 'name': SearchDatastore_Task, 'duration_secs': 0.01127} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.530749] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.531360] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f6f5fb73-521a-4c83-93ea-a1eb2af2e142/f6f5fb73-521a-4c83-93ea-a1eb2af2e142.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.531655] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bf33827-6da4-42c9-9b17-5b761274cfe0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.537934] env[68674]: DEBUG nova.network.neutron [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Successfully created port: fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.549510] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1000.549510] env[68674]: value = "task-3240573" [ 1000.549510] env[68674]: _type = "Task" [ 1000.549510] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.550843] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.550905] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.568871] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240573, 'name': CopyVirtualDisk_Task} progress is 0%. 
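The SearchDatastore_Task and CopyVirtualDisk_Task records above show the image-cache flow: look for devstack-image-cache_base/<image-id>.vmdk, fetch it once if missing, then copy it to <instance-uuid>/<instance-uuid>.vmdk as the instance's root disk. A local-filesystem sketch of that idea follows; it is not the vmwareapi driver's code, and fetch_image() is an assumed callable:

```python
# Local-filesystem sketch of the cache-then-copy flow visible above
# (devstack-image-cache_base/<image>.vmdk -> <instance>/<instance>.vmdk).
# Not the vmwareapi driver's code; fetch_image() is an assumed callable.
import shutil
from pathlib import Path

def provision_root_disk_sketch(datastore: Path, image_id: str,
                               instance_uuid: str, fetch_image):
    cached = datastore / "devstack-image-cache_base" / f"{image_id}.vmdk"
    if not cached.exists():                     # cache miss (SearchDatastore)
        cached.parent.mkdir(parents=True, exist_ok=True)
        fetch_image(image_id, cached)           # download into the cache once
    target = datastore / instance_uuid / f"{instance_uuid}.vmdk"
    target.parent.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(cached, target)             # CopyVirtualDisk equivalent
    return target
```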
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.714372] env[68674]: DEBUG oslo_vmware.api [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240570, 'name': PowerOffVM_Task, 'duration_secs': 0.206904} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.714711] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.715040] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.715338] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-305bc597-495b-4001-8c20-cce7be3c1f4e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.757196] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9866be0-cb09-4e98-9220-bd523d7caa67 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.768131] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fe6241-beee-4c24-863d-613107aa0934 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.813048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.813048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.818222] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c507264-bae7-40b2-b671-ee06ab3ae9c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.821913] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Unregistered the VM 
{{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.822432] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.822755] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] ffdd1c62-1b4e-40cf-a27e-ff2877439701 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.823621] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc43ede6-509f-45e2-ab74-118bdd3c7dc7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.831461] env[68674]: DEBUG oslo_concurrency.lockutils [req-2310ff07-3f9c-4b60-af32-e0ef82851da4 req-07bb6fb2-4188-4d7c-9262-f73562591873 service nova] Releasing lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.843203] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ac8fa6-5518-4bea-8762-f9c06f5ac3d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.848038] env[68674]: DEBUG oslo_vmware.api [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1000.848038] env[68674]: value = "task-3240575" [ 1000.848038] env[68674]: _type = "Task" [ 1000.848038] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.862976] env[68674]: DEBUG nova.compute.provider_tree [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.872234] env[68674]: DEBUG oslo_vmware.api [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240575, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.982026] env[68674]: DEBUG oslo_vmware.api [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Task: {'id': task-3240572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195777} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.982026] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.982026] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.982026] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.982026] env[68674]: INFO nova.compute.manager [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1000.982026] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.982026] env[68674]: DEBUG nova.compute.manager [-] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1000.982026] env[68674]: DEBUG nova.network.neutron [-] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.000683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.059735] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1001.070908] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525186} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.072293] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f6f5fb73-521a-4c83-93ea-a1eb2af2e142/f6f5fb73-521a-4c83-93ea-a1eb2af2e142.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.072293] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.073355] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf97a2b8-54a2-4000-bf27-ac2187c84249 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.080917] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1001.080917] env[68674]: value = "task-3240576" [ 1001.080917] env[68674]: _type = "Task" [ 1001.080917] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.091650] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.316510] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1001.350507] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1001.368367] env[68674]: DEBUG nova.scheduler.client.report [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.374812] env[68674]: DEBUG oslo_vmware.api [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308632} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.374812] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.374812] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.374812] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.374812] env[68674]: INFO nova.compute.manager [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1001.374812] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
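The inventory payload reported above for provider ade3f042-7427-494b-9654-0b65e074850c is what the resource tracker sends to Placement. Assuming the standard Placement capacity formula, the capacity offered to the scheduler per resource class is (total - reserved) * allocation_ratio; a quick check with the logged numbers:

```python
# Usable capacity per resource class, assuming the standard Placement
# formula (total - reserved) * allocation_ratio, using the inventory
# reported in the record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```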
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.375515] env[68674]: DEBUG nova.compute.manager [-] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.375649] env[68674]: DEBUG nova.network.neutron [-] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.390437] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1001.391197] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.391197] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1001.391197] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.391331] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1001.391444] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1001.391581] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1001.391740] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1001.391903] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1001.392081] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1001.392261] env[68674]: DEBUG nova.virt.hardware [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1001.394139] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40a5734-e9e4-4e53-8d3b-ca5b86a984d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.406693] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bf1005-ca08-4e73-bbae-972bac59f5d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.460401] env[68674]: DEBUG nova.compute.manager [req-cd355957-682c-4ca1-b742-779db6127fbb req-cd8354a6-d68b-409e-bdef-d0a0353d8786 service nova] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Received event network-vif-deleted-7eeedd9b-3b07-43ac-a36b-37a42aa42280 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.541239] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.542691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.542691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.542691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.542691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.544843] env[68674]: INFO nova.compute.manager [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Terminating instance [ 1001.590690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.595829] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127285} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.596166] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.596989] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfdbf01-6639-4715-ba6d-63c743150bcb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.625875] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] f6f5fb73-521a-4c83-93ea-a1eb2af2e142/f6f5fb73-521a-4c83-93ea-a1eb2af2e142.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.626694] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b1dc080-1429-468c-b44b-dc91a36c510f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.651026] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1001.651026] env[68674]: value = "task-3240577" [ 1001.651026] env[68674]: _type = "Task" [ 1001.651026] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.660388] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240577, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.792479] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.792479] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.840162] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.876882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.877516] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1001.881023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.356s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.885649] env[68674]: INFO nova.compute.claims [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.885649] env[68674]: DEBUG nova.network.neutron [-] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.050818] env[68674]: DEBUG nova.compute.manager [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1002.050818] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.050818] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa52ef31-0016-4989-b001-70d18a9d2743 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.065885] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.065885] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41c4efcf-ded4-4eee-b38d-16c423560d96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.075716] env[68674]: DEBUG oslo_vmware.api [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 1002.075716] env[68674]: value = "task-3240578" [ 1002.075716] env[68674]: _type = "Task" [ 1002.075716] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.086191] env[68674]: DEBUG oslo_vmware.api [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.162568] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240577, 'name': ReconfigVM_Task, 'duration_secs': 0.327943} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.166019] env[68674]: DEBUG nova.network.neutron [-] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.166019] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Reconfigured VM instance instance-0000005c to attach disk [datastore1] f6f5fb73-521a-4c83-93ea-a1eb2af2e142/f6f5fb73-521a-4c83-93ea-a1eb2af2e142.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.166019] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3dc6dbbc-089d-490e-a6c3-84dba54356f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.177319] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1002.177319] env[68674]: value = "task-3240579" [ 1002.177319] env[68674]: _type = "Task" [ 1002.177319] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.185848] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240579, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.293829] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.384026] env[68674]: DEBUG nova.compute.utils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1002.384815] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1002.385013] env[68674]: DEBUG nova.network.neutron [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1002.389842] env[68674]: INFO nova.compute.manager [-] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Took 1.41 seconds to deallocate network for instance. 
[ 1002.532155] env[68674]: DEBUG nova.policy [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1002.565256] env[68674]: DEBUG nova.network.neutron [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Successfully updated port: fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.588746] env[68674]: DEBUG oslo_vmware.api [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240578, 'name': PowerOffVM_Task, 'duration_secs': 0.239673} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.589123] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.589363] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.589687] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd907a0f-51f1-410f-b821-3796d1dc345e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.665130] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.665130] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.665130] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Deleting the datastore file [datastore2] a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6 
{{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.665130] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-489e7886-43ed-4b88-a80e-f40705acb4c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.666887] env[68674]: INFO nova.compute.manager [-] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Took 1.29 seconds to deallocate network for instance. [ 1002.675253] env[68674]: DEBUG oslo_vmware.api [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for the task: (returnval){ [ 1002.675253] env[68674]: value = "task-3240581" [ 1002.675253] env[68674]: _type = "Task" [ 1002.675253] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.686771] env[68674]: DEBUG oslo_vmware.api [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.690020] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240579, 'name': Rename_Task, 'duration_secs': 0.216568} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.692620] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.692620] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e0d4178-f40c-4053-a76c-f4ac4ca18457 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.700020] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1002.700020] env[68674]: value = "task-3240582" [ 1002.700020] env[68674]: _type = "Task" [ 1002.700020] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.706580] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240582, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.834150] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.889233] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1002.905291] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.032558] env[68674]: DEBUG nova.network.neutron [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Successfully created port: af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.068641] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.068641] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.068641] env[68674]: DEBUG nova.network.neutron [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.174176] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.187554] env[68674]: DEBUG oslo_vmware.api [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Task: {'id': task-3240581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187938} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.188218] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.188470] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.188641] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.188910] env[68674]: INFO nova.compute.manager [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1003.189076] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1003.189280] env[68674]: DEBUG nova.compute.manager [-] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1003.189377] env[68674]: DEBUG nova.network.neutron [-] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.221555] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240582, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.297330] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec1d9e7-28fb-46fa-8878-15920e69baf1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.305968] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174e918c-a163-45b4-a533-95cd94c4b1ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.340307] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4f2b1f-170f-428b-9048-b4aaa825a118 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.349330] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318713e9-c6bb-4d12-930c-58f92303758b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.365558] env[68674]: DEBUG nova.compute.provider_tree [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.650907] env[68674]: DEBUG nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Received event network-vif-deleted-90c050b9-8e8d-439c-b41f-874750aa76ab {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.650907] env[68674]: DEBUG nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Received event network-vif-deleted-88af4dfc-59d6-4564-9ca9-d5383ed87da6 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.650907] env[68674]: DEBUG nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Received event network-vif-plugged-fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.651374] env[68674]: DEBUG oslo_concurrency.lockutils [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.651595] env[68674]: DEBUG oslo_concurrency.lockutils [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.651836] env[68674]: DEBUG oslo_concurrency.lockutils [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c 
req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.652075] env[68674]: DEBUG nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] No waiting events found dispatching network-vif-plugged-fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.652318] env[68674]: WARNING nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Received unexpected event network-vif-plugged-fa6ef6fe-e229-4cc2-8230-7318adaa728e for instance with vm_state building and task_state spawning. [ 1003.652551] env[68674]: DEBUG nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Received event network-changed-fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.652769] env[68674]: DEBUG nova.compute.manager [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Refreshing instance network info cache due to event network-changed-fa6ef6fe-e229-4cc2-8230-7318adaa728e. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1003.652986] env[68674]: DEBUG oslo_concurrency.lockutils [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.654158] env[68674]: DEBUG nova.network.neutron [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.713849] env[68674]: DEBUG oslo_vmware.api [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240582, 'name': PowerOnVM_Task, 'duration_secs': 0.595019} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.714116] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.714568] env[68674]: INFO nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Took 7.34 seconds to spawn the instance on the hypervisor. 
[ 1003.714933] env[68674]: DEBUG nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.716469] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90863752-2909-411a-9b2c-ddad7dbd98d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.873746] env[68674]: DEBUG nova.scheduler.client.report [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.901215] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1003.914591] env[68674]: DEBUG nova.network.neutron [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.946085] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1003.946353] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1003.946543] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1003.946772] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1003.946945] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1003.947111] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1003.947326] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1003.947488] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1003.947654] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1003.947814] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] 
Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1003.947985] env[68674]: DEBUG nova.virt.hardware [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1003.949193] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c41b0d-a3f7-403f-bc9e-b003a96ca39e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.958872] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e43e3cb-8195-4413-9deb-ae2310ee0f0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.240645] env[68674]: INFO nova.compute.manager [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Took 27.60 seconds to build instance. [ 1004.241582] env[68674]: DEBUG nova.network.neutron [-] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.379021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.379742] env[68674]: DEBUG nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1004.383735] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.168s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.385661] env[68674]: INFO nova.compute.claims [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.416511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.416865] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Instance network_info: |[{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1004.417308] env[68674]: DEBUG oslo_concurrency.lockutils [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.417386] env[68674]: DEBUG nova.network.neutron [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Refreshing network info cache for port fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.418611] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe 
tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:f7:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa6ef6fe-e229-4cc2-8230-7318adaa728e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.428128] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating folder: Project (6f2a133c72064227bd419d63d5d9557f). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.432712] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9173bdf3-7238-4da4-8c6c-fe1fa6e7f65c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.446637] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Created folder: Project (6f2a133c72064227bd419d63d5d9557f) in parent group-v647377. [ 1004.446978] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating folder: Instances. Parent ref: group-v647648. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.447353] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd89a823-8d88-432b-8f9b-913e1cad9fdd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.461646] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Created folder: Instances in parent group-v647648. [ 1004.461942] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.462164] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.462550] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc027da5-e7da-4c4f-9e70-fb2b5381f5b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.492532] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.492532] env[68674]: value = "task-3240585" [ 1004.492532] env[68674]: _type = "Task" [ 1004.492532] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.501928] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240585, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.743887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6a285218-c1a8-4841-8e90-b7341fb1ba23 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.115s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.747792] env[68674]: INFO nova.compute.manager [-] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Took 1.56 seconds to deallocate network for instance. [ 1004.893086] env[68674]: DEBUG nova.compute.utils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1004.897806] env[68674]: DEBUG nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Not allocating networking since 'none' was specified. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1004.993029] env[68674]: DEBUG nova.network.neutron [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updated VIF entry in instance network info cache for port fa6ef6fe-e229-4cc2-8230-7318adaa728e. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.993507] env[68674]: DEBUG nova.network.neutron [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.009915] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240585, 'name': CreateVM_Task, 'duration_secs': 0.405944} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.010557] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1005.011352] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.011528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.011847] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1005.012123] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a19d00d-3583-4755-a9f1-ffc82b00c86f {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.020445] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1005.020445] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a59c26-50db-f812-ea26-0af7d4ecff81" [ 1005.020445] env[68674]: _type = "Task" [ 1005.020445] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.031079] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a59c26-50db-f812-ea26-0af7d4ecff81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.256140] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.301260] env[68674]: DEBUG nova.network.neutron [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Successfully updated port: af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.403022] env[68674]: DEBUG nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1005.498511] env[68674]: DEBUG oslo_concurrency.lockutils [req-9af1a4cb-d460-4e03-96b0-b5b42aa2417c req-d8dc22e0-2f1f-4185-99b2-88f1b6e60a8c service nova] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.535524] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a59c26-50db-f812-ea26-0af7d4ecff81, 'name': SearchDatastore_Task, 'duration_secs': 0.011821} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.535886] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.536137] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.536379] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.536533] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.536713] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.536977] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9072bbe-902c-49e7-86d3-4c8c2799dec9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.552775] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.552877] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.556786] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed5d35bb-0088-41fc-8ce7-164e3707dbf4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.563378] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1005.563378] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5288aac4-c038-5633-00e6-4e11095c4e18" [ 1005.563378] env[68674]: _type = "Task" [ 1005.563378] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.576211] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5288aac4-c038-5633-00e6-4e11095c4e18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.693746] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Received event network-vif-deleted-06217b92-0ccd-4eaf-be24-4bbd6e81f3a3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.693937] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Received event network-vif-plugged-af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.694142] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Acquiring lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.694356] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.694558] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.694710] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] No waiting events found dispatching 
network-vif-plugged-af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1005.694920] env[68674]: WARNING nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Received unexpected event network-vif-plugged-af3ba195-ad22-4557-b100-2d5c3fc527b2 for instance with vm_state building and task_state spawning. [ 1005.695208] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Received event network-changed-a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.695308] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Refreshing instance network info cache due to event network-changed-a90f2990-e03b-4fce-b01d-d1fe57be7c57. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1005.695616] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Acquiring lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.695708] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Acquired lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.695881] env[68674]: DEBUG nova.network.neutron [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Refreshing network info cache for port a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.747564] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf3c229-3773-4774-82a8-779cfae367b4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.757018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bb62af-86f0-4c57-950f-f78eec67c60c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.793087] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908a1d96-fc20-4342-a18b-212d63cc7ed6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.801440] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bbd319-c7f2-4ed4-9d44-e960e492ab23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.807512] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock 
"refresh_cache-8d810cc0-3f85-49c9-9d7d-8e1711a97015" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.807512] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-8d810cc0-3f85-49c9-9d7d-8e1711a97015" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.807512] env[68674]: DEBUG nova.network.neutron [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.818104] env[68674]: DEBUG nova.compute.provider_tree [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.074835] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5288aac4-c038-5633-00e6-4e11095c4e18, 'name': SearchDatastore_Task, 'duration_secs': 0.011102} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.075760] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e873ebd-1723-440e-ae0a-bb777cda6084 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.081836] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1006.081836] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526dde79-9f18-d49e-eea2-a1387f3225c6" [ 1006.081836] env[68674]: _type = "Task" [ 1006.081836] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.090367] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526dde79-9f18-d49e-eea2-a1387f3225c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.323142] env[68674]: DEBUG nova.scheduler.client.report [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.382093] env[68674]: DEBUG nova.network.neutron [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.419179] env[68674]: DEBUG nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1006.450820] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1006.451084] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.451256] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1006.451447] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
[ 1006.451594] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1006.451739] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1006.451947] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1006.452118] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1006.452321] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1006.452456] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1006.452628] env[68674]: DEBUG nova.virt.hardware [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1006.453548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bf4df2-773e-46e4-9e4b-1865edf2e1d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.466543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7719824b-b531-4c90-ab38-1df28f842f57 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.480726] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.486421] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Creating folder: Project (d2ab9de530834056bbd390a8640cf1c2). 
Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.489378] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2dd9bfb-92a4-4b0d-ab1a-bc0df049a516 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.503977] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Created folder: Project (d2ab9de530834056bbd390a8640cf1c2) in parent group-v647377. [ 1006.504204] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Creating folder: Instances. Parent ref: group-v647651. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.504583] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a1ee90b-765a-4cc1-a47e-d5437cf71810 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.520352] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Created folder: Instances in parent group-v647651. [ 1006.520617] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1006.520827] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.521288] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b4614c4-4cb5-40b8-aca6-3ffcc8b36584 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.541980] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.541980] env[68674]: value = "task-3240588" [ 1006.541980] env[68674]: _type = "Task" [ 1006.541980] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.550998] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240588, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.593374] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526dde79-9f18-d49e-eea2-a1387f3225c6, 'name': SearchDatastore_Task, 'duration_secs': 0.01383} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.593709] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.594016] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21/30731a3c-34ba-40c8-9b8f-2d867eff4f21.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.594289] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eda91314-9491-4a63-875f-94b48041b0a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.601752] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1006.601752] env[68674]: value = "task-3240589" [ 1006.601752] env[68674]: _type = "Task" [ 1006.601752] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.611273] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240589, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.831264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.831264] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1006.833688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.172s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.834531] env[68674]: DEBUG nova.objects.instance [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1007.005981] env[68674]: DEBUG nova.network.neutron [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Updated VIF entry in instance network info cache for port a90f2990-e03b-4fce-b01d-d1fe57be7c57. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1007.005981] env[68674]: DEBUG nova.network.neutron [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Updating instance_info_cache with network_info: [{"id": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "address": "fa:16:3e:cc:b9:14", "network": {"id": "81ca1306-cf11-4d45-9bb8-26dea315dda8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-558830138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f5ed5655bc84a88bf9b4b42b4a3b1e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa90f2990-e0", "ovs_interfaceid": "a90f2990-e03b-4fce-b01d-d1fe57be7c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.008453] env[68674]: DEBUG nova.network.neutron [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Updating instance_info_cache with network_info: [{"id": "af3ba195-ad22-4557-b100-2d5c3fc527b2", "address": "fa:16:3e:59:6c:b8", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3ba195-ad", "ovs_interfaceid": "af3ba195-ad22-4557-b100-2d5c3fc527b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.060083] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240588, 'name': CreateVM_Task, 'duration_secs': 0.411087} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.060083] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.060083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.060083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.060083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1007.060939] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dddd19e-ac01-4ff1-8dc8-cf1c6423de70 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.068948] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1007.068948] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a25fd4-67db-f8e3-ca60-d0833e64e913" [ 1007.068948] env[68674]: _type = "Task" [ 1007.068948] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.079619] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a25fd4-67db-f8e3-ca60-d0833e64e913, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.112590] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240589, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.339664] env[68674]: DEBUG nova.compute.utils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.344079] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1007.344259] env[68674]: DEBUG nova.network.neutron [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1007.430194] env[68674]: DEBUG nova.policy [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b3a4c2c5bae41998d58a116e648883d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa34d6d90c6d46aaa2cb77259b5e0c27', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1007.508704] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Releasing lock "refresh_cache-f6f5fb73-521a-4c83-93ea-a1eb2af2e142" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.508996] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Received event network-changed-af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.509184] env[68674]: DEBUG nova.compute.manager [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 
8d810cc0-3f85-49c9-9d7d-8e1711a97015] Refreshing instance network info cache due to event network-changed-af3ba195-ad22-4557-b100-2d5c3fc527b2. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1007.509380] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Acquiring lock "refresh_cache-8d810cc0-3f85-49c9-9d7d-8e1711a97015" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.511924] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-8d810cc0-3f85-49c9-9d7d-8e1711a97015" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.512229] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Instance network_info: |[{"id": "af3ba195-ad22-4557-b100-2d5c3fc527b2", "address": "fa:16:3e:59:6c:b8", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3ba195-ad", "ovs_interfaceid": "af3ba195-ad22-4557-b100-2d5c3fc527b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1007.512517] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Acquired lock "refresh_cache-8d810cc0-3f85-49c9-9d7d-8e1711a97015" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.512706] env[68674]: DEBUG nova.network.neutron [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Refreshing network info cache for port af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.514463] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:6c:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af3ba195-ad22-4557-b100-2d5c3fc527b2', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.524833] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.528675] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.528675] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b0da866-782b-4895-badd-155f0f80cb77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.552165] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.552165] env[68674]: value = "task-3240590" [ 1007.552165] env[68674]: _type = "Task" [ 1007.552165] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.561506] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240590, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.579783] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a25fd4-67db-f8e3-ca60-d0833e64e913, 'name': SearchDatastore_Task, 'duration_secs': 0.050739} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.580274] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.580550] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.580786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.580935] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.581127] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.581397] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b06ba47e-1420-431a-856b-1df7ec73801a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.592228] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.592495] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.593304] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04b76306-7910-4569-b164-ab718784b365 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.598890] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1007.598890] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1fd03-5fa3-45d2-49a3-0539eb2884ee" [ 1007.598890] env[68674]: _type = "Task" [ 1007.598890] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.615570] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1fd03-5fa3-45d2-49a3-0539eb2884ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.619025] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240589, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561475} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.619297] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21/30731a3c-34ba-40c8-9b8f-2d867eff4f21.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.619509] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.619756] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c539a01-85f7-4f6e-bee4-4018b883ebd8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.627871] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1007.627871] env[68674]: value = "task-3240591" [ 1007.627871] env[68674]: _type = "Task" [ 1007.627871] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.636887] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.847234] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1007.850811] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9e51e73d-cc67-4d64-adc9-709de1ced716 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.851943] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.323s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.852187] env[68674]: DEBUG nova.objects.instance [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'resources' on Instance uuid 7d953e59-53c1-4041-a641-35c12c012f7e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.063293] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240590, 'name': CreateVM_Task, 'duration_secs': 0.374877} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.063504] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.064422] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.064643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.064963] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.065232] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e77cafe0-0b5b-46a3-813b-c308ec280bf5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.071163] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1008.071163] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52965e4b-ec44-aca8-4fff-0232f8947689" [ 1008.071163] env[68674]: _type = "Task" [ 1008.071163] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.076020] env[68674]: DEBUG nova.network.neutron [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Updated VIF entry in instance network info cache for port af3ba195-ad22-4557-b100-2d5c3fc527b2. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.076020] env[68674]: DEBUG nova.network.neutron [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Updating instance_info_cache with network_info: [{"id": "af3ba195-ad22-4557-b100-2d5c3fc527b2", "address": "fa:16:3e:59:6c:b8", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3ba195-ad", "ovs_interfaceid": "af3ba195-ad22-4557-b100-2d5c3fc527b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.091711] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52965e4b-ec44-aca8-4fff-0232f8947689, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.111423] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1fd03-5fa3-45d2-49a3-0539eb2884ee, 'name': SearchDatastore_Task, 'duration_secs': 0.015161} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.112549] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-087b29a7-97ab-4b5b-b6f4-918e2e66143e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.122019] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1008.122019] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c70b35-b49d-3558-38a7-8dfee73fd0fb" [ 1008.122019] env[68674]: _type = "Task" [ 1008.122019] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.132611] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c70b35-b49d-3558-38a7-8dfee73fd0fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.142232] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094296} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.142501] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.143413] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feca82c8-82e7-497d-a956-6d689008142d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.169283] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21/30731a3c-34ba-40c8-9b8f-2d867eff4f21.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.169556] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4b0e1cd-2298-4f85-a545-c370c9fca701 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.192147] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1008.192147] env[68674]: value = "task-3240592" [ 1008.192147] env[68674]: _type = "Task" [ 1008.192147] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.201418] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240592, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.229728] env[68674]: DEBUG nova.network.neutron [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Successfully created port: 4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.583380] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52965e4b-ec44-aca8-4fff-0232f8947689, 'name': SearchDatastore_Task, 'duration_secs': 0.011545} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.583859] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.584122] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.584356] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.592752] env[68674]: DEBUG oslo_concurrency.lockutils [req-b4285024-81c5-4d74-81ca-4b49ca2de961 req-4c38a1a6-f1e1-45a6-9b2a-17f7a720552b service nova] Releasing lock "refresh_cache-8d810cc0-3f85-49c9-9d7d-8e1711a97015" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.632915] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c70b35-b49d-3558-38a7-8dfee73fd0fb, 'name': SearchDatastore_Task, 'duration_secs': 0.013325} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.633207] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.633668] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.633776] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.634020] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.635098] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f856e3f7-d657-4d15-bc32-71fa7e7ad90e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.636343] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62154dc1-aaa5-49f0-9186-4636cd2d431f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.645547] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1008.645547] env[68674]: value = "task-3240593" [ 1008.645547] env[68674]: _type = "Task" [ 1008.645547] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.650172] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.650380] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.651692] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a015da3-f093-478d-8203-67480e2e82c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.657487] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.666028] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1008.666028] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52800275-b292-a5a7-9e97-0e4286740dd7" [ 1008.666028] env[68674]: _type = "Task" [ 1008.666028] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.676457] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52800275-b292-a5a7-9e97-0e4286740dd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.704641] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240592, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.728675] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed69ba63-46d3-4690-aac6-ba6b337ab326 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.736995] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a001bc-b8d4-49cf-b19b-7a8d4449442e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.771969] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2516eaa2-68d0-46fc-9d6b-f76ec3b69908 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.780303] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10edb9a0-924a-47a3-a8df-1593d6d590b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.795486] env[68674]: DEBUG nova.compute.provider_tree [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.865897] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1008.896833] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1008.897136] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.897324] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1008.897475] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.897625] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1008.897888] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1008.897981] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1008.898189] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1008.898368] env[68674]: DEBUG nova.virt.hardware [None 
req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1008.898535] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1008.898705] env[68674]: DEBUG nova.virt.hardware [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1008.899642] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372177ca-3370-4888-a736-487af7fe236c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.909047] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded7428d-8c0a-41bf-96a2-e7671ed666de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.156890] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240593, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.176669] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52800275-b292-a5a7-9e97-0e4286740dd7, 'name': SearchDatastore_Task, 'duration_secs': 0.011664} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.177450] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9e11cc9-7737-4fb6-aa69-b06ae01355b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.184782] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1009.184782] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529e9dc5-7b9f-e7e2-309e-3ee5dcd56520" [ 1009.184782] env[68674]: _type = "Task" [ 1009.184782] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.192843] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529e9dc5-7b9f-e7e2-309e-3ee5dcd56520, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.202629] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240592, 'name': ReconfigVM_Task, 'duration_secs': 0.652523} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.202924] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21/30731a3c-34ba-40c8-9b8f-2d867eff4f21.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.203612] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be281414-c831-45ee-bc8b-9aaa8b0dcef0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.211098] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1009.211098] env[68674]: value = "task-3240594" [ 1009.211098] env[68674]: _type = "Task" [ 1009.211098] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.219298] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240594, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.298931] env[68674]: DEBUG nova.scheduler.client.report [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.658171] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530661} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.658479] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.658703] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1009.658959] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c96e8743-5736-476c-8e79-f8169cdb5b2c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.667058] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1009.667058] env[68674]: value = "task-3240595" [ 1009.667058] env[68674]: _type = "Task" [ 1009.667058] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.681089] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.694759] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529e9dc5-7b9f-e7e2-309e-3ee5dcd56520, 'name': SearchDatastore_Task, 'duration_secs': 0.01129} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.695035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.695302] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 8d810cc0-3f85-49c9-9d7d-8e1711a97015/8d810cc0-3f85-49c9-9d7d-8e1711a97015.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.695556] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40217264-accb-434a-93f0-49013c99c75e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.705192] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1009.705192] env[68674]: value = "task-3240596" [ 1009.705192] env[68674]: _type = "Task" [ 1009.705192] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.717918] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240596, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.724596] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240594, 'name': Rename_Task, 'duration_secs': 0.1745} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.724596] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.725180] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05889ea5-c763-4165-8029-25c8ee04fb41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.732271] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1009.732271] env[68674]: value = "task-3240597" [ 1009.732271] env[68674]: _type = "Task" [ 1009.732271] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.743381] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.805591] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.953s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.810577] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.810s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.810842] env[68674]: DEBUG nova.objects.instance [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lazy-loading 'resources' on Instance uuid bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.835507] env[68674]: INFO nova.scheduler.client.report [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleted allocations for instance 7d953e59-53c1-4041-a641-35c12c012f7e [ 1009.879168] env[68674]: DEBUG nova.compute.manager [req-dd6e52e2-1930-466a-b20f-1ed38b42d90c req-d2496718-e1b9-4119-9755-9043e73c4f4f service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Received event network-vif-plugged-4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.879168] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd6e52e2-1930-466a-b20f-1ed38b42d90c req-d2496718-e1b9-4119-9755-9043e73c4f4f service nova] Acquiring lock "182deaf0-c20a-4041-8f41-81786d6b053e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.879168] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd6e52e2-1930-466a-b20f-1ed38b42d90c req-d2496718-e1b9-4119-9755-9043e73c4f4f service nova] Lock "182deaf0-c20a-4041-8f41-81786d6b053e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.879168] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd6e52e2-1930-466a-b20f-1ed38b42d90c req-d2496718-e1b9-4119-9755-9043e73c4f4f service nova] Lock "182deaf0-c20a-4041-8f41-81786d6b053e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.879907] 
env[68674]: DEBUG nova.compute.manager [req-dd6e52e2-1930-466a-b20f-1ed38b42d90c req-d2496718-e1b9-4119-9755-9043e73c4f4f service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] No waiting events found dispatching network-vif-plugged-4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1009.880330] env[68674]: WARNING nova.compute.manager [req-dd6e52e2-1930-466a-b20f-1ed38b42d90c req-d2496718-e1b9-4119-9755-9043e73c4f4f service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Received unexpected event network-vif-plugged-4227b11c-e4da-42b5-80c0-af15c0b4de7f for instance with vm_state building and task_state spawning. [ 1009.978161] env[68674]: DEBUG nova.network.neutron [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Successfully updated port: 4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.177414] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076841} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.177751] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.178600] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f8e961-d224-4b63-893d-b978264ce8dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.207330] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.207742] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18e687cc-18d5-499e-9bec-a5e48919f636 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.231906] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484736} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.233202] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 8d810cc0-3f85-49c9-9d7d-8e1711a97015/8d810cc0-3f85-49c9-9d7d-8e1711a97015.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.233452] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.233834] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1010.233834] env[68674]: value = "task-3240598" [ 1010.233834] env[68674]: _type = "Task" [ 1010.233834] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.234047] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-314c0eac-e277-47aa-a564-ec5786bb19f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.248327] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240598, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.252714] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1010.252714] env[68674]: value = "task-3240599" [ 1010.252714] env[68674]: _type = "Task" [ 1010.252714] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.253304] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240597, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.263740] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240599, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.346555] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bfd11711-5b33-4ae4-b2a7-67d774bbe5cd tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "7d953e59-53c1-4041-a641-35c12c012f7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 15.898s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.481017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.481306] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.481933] env[68674]: DEBUG nova.network.neutron [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.620029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4021959e-2717-4dbd-bf2d-2fd3bbfb5bea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.629962] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f073f5-3df5-4363-8829-df3fe4a382f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.666247] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b8954b-2ebc-4552-8c6c-74039bba0b1c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.674687] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a833d965-fa9b-4310-9dbe-bf332f7a92c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.688803] env[68674]: DEBUG nova.compute.provider_tree [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.752762] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240598, 'name': ReconfigVM_Task, 'duration_secs': 0.319186} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.753301] env[68674]: DEBUG oslo_vmware.api [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240597, 'name': PowerOnVM_Task, 'duration_secs': 0.938579} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.753706] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Reconfigured VM instance instance-0000005f to attach disk [datastore1] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.757056] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.757056] env[68674]: INFO nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1010.757056] env[68674]: DEBUG nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.757056] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c323ad6e-b9de-4d92-8490-48adb6a97b39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.760588] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f2c1ad-e337-4ace-a9e6-58b26ea0ce30 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.770741] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074495} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.772768] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.773263] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1010.773263] env[68674]: value = "task-3240600" [ 1010.773263] env[68674]: _type = "Task" [ 1010.773263] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.779442] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e430a986-ca74-47c0-8597-c159237fea2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.814925] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 8d810cc0-3f85-49c9-9d7d-8e1711a97015/8d810cc0-3f85-49c9-9d7d-8e1711a97015.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.815743] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240600, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.816057] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24eb18bd-0b1d-40fa-a1b1-3b81c445930f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.840484] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1010.840484] env[68674]: value = "task-3240601" [ 1010.840484] env[68674]: _type = "Task" [ 1010.840484] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.851400] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240601, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.040066] env[68674]: DEBUG nova.network.neutron [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.192172] env[68674]: DEBUG nova.scheduler.client.report [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.260760] env[68674]: DEBUG nova.network.neutron [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updating instance_info_cache with network_info: [{"id": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "address": "fa:16:3e:e9:a4:ab", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4227b11c-e4", "ovs_interfaceid": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.298825] env[68674]: INFO nova.compute.manager [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Took 21.90 seconds to build instance. [ 1011.307532] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240600, 'name': Rename_Task, 'duration_secs': 0.159206} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.309020] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.309020] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0597b259-c4c8-47ff-ba7f-11004105c9c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.318539] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1011.318539] env[68674]: value = "task-3240603" [ 1011.318539] env[68674]: _type = "Task" [ 1011.318539] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.337514] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240603, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.351774] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240601, 'name': ReconfigVM_Task, 'duration_secs': 0.347637} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.352126] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 8d810cc0-3f85-49c9-9d7d-8e1711a97015/8d810cc0-3f85-49c9-9d7d-8e1711a97015.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.352779] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a86dfd27-652d-48b6-b783-fd2481872738 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.363589] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1011.363589] env[68674]: value = "task-3240604" [ 1011.363589] env[68674]: _type = "Task" [ 1011.363589] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.372844] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240604, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.698022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.700479] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.110s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.702078] env[68674]: INFO nova.compute.claims [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.722324] env[68674]: INFO nova.scheduler.client.report [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Deleted allocations for instance bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77 [ 1011.766026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.766026] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Instance network_info: |[{"id": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "address": "fa:16:3e:e9:a4:ab", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4227b11c-e4", "ovs_interfaceid": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1011.766026] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:a4:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b9aabc7c-0f6c-42eb-bd27-493a1496c0c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4227b11c-e4da-42b5-80c0-af15c0b4de7f', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.772920] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1011.773959] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1011.774221] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0524b7bf-26f4-461c-8e81-9c5c9e0c4cf9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.798970] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.798970] env[68674]: value = "task-3240605" [ 1011.798970] env[68674]: _type = "Task" [ 1011.798970] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.804749] env[68674]: DEBUG oslo_concurrency.lockutils [None req-22b8709d-e9c9-42a3-a708-4d2f60b499fe tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.416s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.810613] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240605, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.829937] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240603, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.873797] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240604, 'name': Rename_Task, 'duration_secs': 0.264868} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.873962] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.874110] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08d026fe-b064-496a-8cd0-54aca7ac7ccc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.881308] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1011.881308] env[68674]: value = "task-3240606" [ 1011.881308] env[68674]: _type = "Task" [ 1011.881308] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.964782] env[68674]: DEBUG nova.compute.manager [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Received event network-changed-4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.964782] env[68674]: DEBUG nova.compute.manager [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Refreshing instance network info cache due to event network-changed-4227b11c-e4da-42b5-80c0-af15c0b4de7f. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1011.964934] env[68674]: DEBUG oslo_concurrency.lockutils [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] Acquiring lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.964999] env[68674]: DEBUG oslo_concurrency.lockutils [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] Acquired lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.965424] env[68674]: DEBUG nova.network.neutron [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Refreshing network info cache for port 4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.137637] env[68674]: DEBUG nova.compute.manager [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Received event network-changed-fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1012.137922] env[68674]: DEBUG nova.compute.manager [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Refreshing instance network info cache due to event network-changed-fa6ef6fe-e229-4cc2-8230-7318adaa728e. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1012.138166] env[68674]: DEBUG oslo_concurrency.lockutils [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.138370] env[68674]: DEBUG oslo_concurrency.lockutils [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.138562] env[68674]: DEBUG nova.network.neutron [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Refreshing network info cache for port fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.230138] env[68674]: DEBUG oslo_concurrency.lockutils [None req-89de4556-077a-44ea-a2fa-f34dde0ed400 tempest-ServerGroupTestJSON-1536476529 tempest-ServerGroupTestJSON-1536476529-project-member] Lock "bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.050s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.311243] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240605, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.330305] env[68674]: DEBUG oslo_vmware.api [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240603, 'name': PowerOnVM_Task, 'duration_secs': 0.620426} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.330539] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.330743] env[68674]: INFO nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1012.330954] env[68674]: DEBUG nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.331777] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474b5897-2ce6-4d72-ae5a-29938ca3de7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.408059] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240606, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.816432] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240605, 'name': CreateVM_Task, 'duration_secs': 0.555841} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.816956] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1012.817379] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.817694] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.818534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1012.818534] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26dd5a03-e65f-4d9d-b116-823a6a0b2d2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.826118] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1012.826118] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52127b2b-bdd2-852a-d72b-600b85149ebd" [ 1012.826118] env[68674]: _type = "Task" [ 1012.826118] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.834626] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52127b2b-bdd2-852a-d72b-600b85149ebd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.852070] env[68674]: INFO nova.compute.manager [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Took 17.35 seconds to build instance. [ 1012.909389] env[68674]: DEBUG oslo_vmware.api [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240606, 'name': PowerOnVM_Task, 'duration_secs': 0.832026} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.909639] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.909883] env[68674]: INFO nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Took 9.01 seconds to spawn the instance on the hypervisor. [ 1012.910103] env[68674]: DEBUG nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.910924] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59205b7e-7543-4850-ad93-76c16d34cc26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.054043] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa05691-e721-4998-a761-5efd73d92355 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.061921] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e808ca3-2fcb-4480-8a00-2f26764e7c81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.093069] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0722be3-fa73-4ff0-8bd4-462b11937508 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.103659] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87edfc0a-a894-4934-b081-7e74fa1af188 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.121861] env[68674]: DEBUG nova.compute.provider_tree [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.208780] env[68674]: DEBUG nova.network.neutron [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updated VIF entry in instance network info cache for port 4227b11c-e4da-42b5-80c0-af15c0b4de7f. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.208780] env[68674]: DEBUG nova.network.neutron [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updating instance_info_cache with network_info: [{"id": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "address": "fa:16:3e:e9:a4:ab", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4227b11c-e4", "ovs_interfaceid": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.336749] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52127b2b-bdd2-852a-d72b-600b85149ebd, 'name': SearchDatastore_Task, 'duration_secs': 0.023033} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.337212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.337553] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1013.337906] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.338172] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.338445] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1013.338785] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21dbc422-554b-4a51-86b0-26fb1d2f502b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.346021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.346477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.350028] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Created directory 
with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1013.350028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1013.350814] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec9a92ce-4692-4f83-8e4e-79163912e399 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.357392] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d56d26e-1834-4df0-815d-65ce5ee62f0f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "cab97ca7-968b-4d40-bb1f-2244469e1b56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.867s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.357745] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1013.357745] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba5807-b2b8-a258-8678-3f1fdb524aa9" [ 1013.357745] env[68674]: _type = "Task" [ 1013.357745] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.370973] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba5807-b2b8-a258-8678-3f1fdb524aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.375646] env[68674]: INFO nova.compute.manager [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Rebuilding instance [ 1013.427391] env[68674]: DEBUG nova.compute.manager [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.432028] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1be8787-49df-45f7-a623-e6fe8f842e30 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.434266] env[68674]: INFO nova.compute.manager [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Took 22.73 seconds to build instance. 
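[editor's note] The entries in this trace repeatedly show the oslo.vmware request/poll cycle: an "Invoking <Object>.<Method> with opID=oslo.vmware-..." line, followed by "Task: {'id': task-..., 'name': ...} progress is N%" polling and a final "completed successfully". The snippet below is a minimal illustrative sketch of that client pattern, not Nova's own code; the host name, credentials, retry/poll values and the 'vm-12345' managed-object reference are placeholders.

    from oslo_vmware import api, vim_util

    # Open a vCenter API session (placeholder host/credentials).
    session = api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for a VM (placeholder id) and invoke a
    # task-returning SOAP method; oslo_vmware.service logs each call as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task object, which is what produces the
    # "Task: {'id': task-..., 'name': ...} progress is N%" and
    # "... completed successfully" lines seen throughout this trace.
    session.wait_for_task(task)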
[ 1013.452602] env[68674]: DEBUG nova.network.neutron [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updated VIF entry in instance network info cache for port fa6ef6fe-e229-4cc2-8230-7318adaa728e. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.452962] env[68674]: DEBUG nova.network.neutron [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.630419] env[68674]: DEBUG nova.scheduler.client.report [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.711155] env[68674]: DEBUG oslo_concurrency.lockutils [req-9a4087c5-c27f-42c4-8777-37e4465a641a req-f319849b-34ae-497a-9f62-e36747b20ead service nova] Releasing lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.852361] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.870841] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba5807-b2b8-a258-8678-3f1fdb524aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.013745} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.871252] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9582cadb-b7a1-47f4-9916-23fd968194dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.878170] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1013.878170] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4e232-62d4-a44d-82e9-6c668a3f21cd" [ 1013.878170] env[68674]: _type = "Task" [ 1013.878170] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.887935] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4e232-62d4-a44d-82e9-6c668a3f21cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.939638] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0a7fc352-92b5-4831-9130-8fcdf3530752 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.258s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.955722] env[68674]: DEBUG oslo_concurrency.lockutils [req-47584cf6-20f0-406c-b5b8-a28ce3acf84f req-65f40d33-fb0e-4ba5-aff7-b414637e5c20 service nova] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.133322] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.133951] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1014.137308] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.297s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.138845] env[68674]: INFO nova.compute.claims [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.377306] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "8740a794-a772-4260-aeb1-51762a586fe2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.377562] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8740a794-a772-4260-aeb1-51762a586fe2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.385189] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.392916] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4e232-62d4-a44d-82e9-6c668a3f21cd, 'name': SearchDatastore_Task, 'duration_secs': 0.013072} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.393847] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.394130] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 182deaf0-c20a-4041-8f41-81786d6b053e/182deaf0-c20a-4041-8f41-81786d6b053e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1014.394436] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f236ec0-0d7f-4b73-9677-1c7c01a7a756 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.402837] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1014.402837] env[68674]: value = "task-3240607" [ 1014.402837] env[68674]: _type = "Task" [ 1014.402837] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.412659] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.445119] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1014.445119] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b73d694-b198-4f09-8020-e880b5e38a85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.454965] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1014.454965] env[68674]: value = "task-3240608" [ 1014.454965] env[68674]: _type = "Task" [ 1014.454965] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.469212] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240608, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.639671] env[68674]: DEBUG nova.compute.utils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1014.640799] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1014.640975] env[68674]: DEBUG nova.network.neutron [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1014.706120] env[68674]: DEBUG nova.policy [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50e46e8c9fbc4778b5f89359ae81bfa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6b179855b874365964446f95f9f5a53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1014.839629] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.839921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.840268] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.840370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock 
"5c12cb5d-821c-4e63-86a0-dadc9794a8ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.840566] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.844969] env[68674]: INFO nova.compute.manager [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Terminating instance [ 1014.885242] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1014.915865] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240607, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.966425] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240608, 'name': PowerOffVM_Task, 'duration_secs': 0.281765} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.967071] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1014.967507] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1014.969109] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54b47e0-05e2-41c9-a7ec-f0a5471eb92f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.985340] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1014.985340] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf8a8a58-30f1-44b9-a1cc-9b7ac6756e50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.014322] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.014322] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.014322] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Deleting the datastore file [datastore1] cab97ca7-968b-4d40-bb1f-2244469e1b56 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.014322] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa4c291f-93db-457c-aae3-6bff33200b4c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.021414] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1015.021414] env[68674]: value = "task-3240610" [ 1015.021414] env[68674]: _type = "Task" [ 1015.021414] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.026429] env[68674]: DEBUG nova.network.neutron [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Successfully created port: 1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1015.033036] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.144905] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1015.351104] env[68674]: DEBUG nova.compute.manager [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1015.351387] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.352261] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5cba73-92c2-419f-80e0-03e9b48caf50 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.365262] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.365530] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c3c1c37-0098-480f-934a-f994dcab6fc9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.373026] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 1015.373026] env[68674]: value = "task-3240611" [ 1015.373026] env[68674]: _type = "Task" [ 1015.373026] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.387296] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.411642] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.421078] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623214} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.421460] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 182deaf0-c20a-4041-8f41-81786d6b053e/182deaf0-c20a-4041-8f41-81786d6b053e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.421719] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1015.422177] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97a2ce08-c5eb-497f-a7da-d76fc2556d9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.434549] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1015.434549] env[68674]: value = "task-3240612" [ 1015.434549] env[68674]: _type = "Task" [ 1015.434549] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.447191] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240612, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.483436] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cdf0bf-d665-4ddf-bfb3-aef2ad728861 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.493445] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617d2b11-d1f7-4a7e-8426-b9dd04959b0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.532924] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701639e3-a2ab-4c63-a266-a7d3cc6e17f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.547486] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d21ec2-8ba8-4703-8053-53c59f83e518 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.551728] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386914} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.552143] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1015.552228] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1015.552410] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1015.568417] env[68674]: DEBUG nova.compute.provider_tree [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.886910] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240611, 'name': PowerOffVM_Task, 'duration_secs': 0.311497} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.887222] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1015.887409] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1015.887662] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58e5e3fd-a941-4a6d-9a1a-dcde4c63afba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.945113] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119114} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.945447] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1015.946257] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa49b37-ae87-4412-815a-4b7a3a6a717d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.969306] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 182deaf0-c20a-4041-8f41-81786d6b053e/182deaf0-c20a-4041-8f41-81786d6b053e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.969499] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-288e1c4d-f09a-4d65-8595-6ec376b300a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.986321] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.986596] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 
5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.986793] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleting the datastore file [datastore2] 5c12cb5d-821c-4e63-86a0-dadc9794a8ba {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.987444] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5350324d-e2b9-441b-8f7d-d7318a73b79b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.991846] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1015.991846] env[68674]: value = "task-3240614" [ 1015.991846] env[68674]: _type = "Task" [ 1015.991846] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.996054] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 1015.996054] env[68674]: value = "task-3240615" [ 1015.996054] env[68674]: _type = "Task" [ 1015.996054] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.002876] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240614, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.007768] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240615, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.072063] env[68674]: DEBUG nova.scheduler.client.report [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.159224] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1016.187025] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1016.187445] env[68674]: DEBUG nova.virt.hardware [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1016.188164] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5932715d-a64c-4582-ae33-0e046cc96e2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.197039] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53de4aeb-0a94-4be7-9518-35f4e61d9f40 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.502966] env[68674]: DEBUG nova.compute.manager [req-ed68d787-6c60-4253-96d0-a7a54bd5152a req-5fbf285e-eccf-4ff6-b353-d7a2464da3c9 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Received event network-vif-plugged-1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1016.503313] env[68674]: DEBUG oslo_concurrency.lockutils [req-ed68d787-6c60-4253-96d0-a7a54bd5152a req-5fbf285e-eccf-4ff6-b353-d7a2464da3c9 service nova] Acquiring lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.503600] env[68674]: DEBUG 
oslo_concurrency.lockutils [req-ed68d787-6c60-4253-96d0-a7a54bd5152a req-5fbf285e-eccf-4ff6-b353-d7a2464da3c9 service nova] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.503851] env[68674]: DEBUG oslo_concurrency.lockutils [req-ed68d787-6c60-4253-96d0-a7a54bd5152a req-5fbf285e-eccf-4ff6-b353-d7a2464da3c9 service nova] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.504216] env[68674]: DEBUG nova.compute.manager [req-ed68d787-6c60-4253-96d0-a7a54bd5152a req-5fbf285e-eccf-4ff6-b353-d7a2464da3c9 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] No waiting events found dispatching network-vif-plugged-1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1016.504290] env[68674]: WARNING nova.compute.manager [req-ed68d787-6c60-4253-96d0-a7a54bd5152a req-5fbf285e-eccf-4ff6-b353-d7a2464da3c9 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Received unexpected event network-vif-plugged-1e919367-5786-4b72-b1e9-31aed453f5fc for instance with vm_state building and task_state spawning. [ 1016.508461] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.513485] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240615, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.579611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.580475] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1016.585365] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.751s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.587094] env[68674]: INFO nova.compute.claims [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1016.599324] env[68674]: DEBUG nova.network.neutron [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Successfully updated port: 1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None 
req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1016.604347] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1016.604909] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1016.605259] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1016.605600] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1016.605916] env[68674]: DEBUG nova.virt.hardware [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1016.607450] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e84b93b-3775-40c8-b750-9e81bab3af17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.617185] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc97944-9169-40af-87a1-abf55f55c4c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.634983] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance VIF info [] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.641115] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.642081] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.642717] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bf94c49-fb66-4e18-92bb-1210daa56ae1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.662550] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.662550] env[68674]: value = "task-3240616" [ 1016.662550] env[68674]: _type = "Task" [ 1016.662550] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.671484] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240616, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.005519] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240614, 'name': ReconfigVM_Task, 'duration_secs': 0.89305} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.008367] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 182deaf0-c20a-4041-8f41-81786d6b053e/182deaf0-c20a-4041-8f41-81786d6b053e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.009031] env[68674]: DEBUG oslo_vmware.api [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240615, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.705096} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.009219] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7ad5a13-0fba-4606-a80e-ba1f48ee301a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.010645] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.010834] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.011014] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.011195] env[68674]: INFO nova.compute.manager [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1017.011459] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.011656] env[68674]: DEBUG nova.compute.manager [-] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1017.011744] env[68674]: DEBUG nova.network.neutron [-] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1017.020940] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1017.020940] env[68674]: value = "task-3240617" [ 1017.020940] env[68674]: _type = "Task" [ 1017.020940] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.029189] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240617, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.097038] env[68674]: DEBUG nova.compute.utils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1017.100462] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1017.100639] env[68674]: DEBUG nova.network.neutron [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1017.107305] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "refresh_cache-79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.107475] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "refresh_cache-79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.107643] env[68674]: DEBUG nova.network.neutron [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.164189] env[68674]: DEBUG nova.policy [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1017.174749] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240616, 'name': CreateVM_Task, 'duration_secs': 0.290844} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.174925] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.175332] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.175490] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.175819] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.176085] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0da2daf8-7b04-4162-b751-a1ac405160c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.181104] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1017.181104] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520ae261-b64d-1246-ccf2-c6a4e9efad10" [ 1017.181104] env[68674]: _type = "Task" [ 1017.181104] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.193748] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520ae261-b64d-1246-ccf2-c6a4e9efad10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.531923] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240617, 'name': Rename_Task, 'duration_secs': 0.18061} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.532232] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.533048] env[68674]: DEBUG nova.network.neutron [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Successfully created port: 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.534879] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6ed5f2b-9046-457b-bb50-44b4d85ba7b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.542674] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1017.542674] env[68674]: value = "task-3240618" [ 1017.542674] env[68674]: _type = "Task" [ 1017.542674] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.553150] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.601568] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1017.651304] env[68674]: DEBUG nova.network.neutron [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.696568] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520ae261-b64d-1246-ccf2-c6a4e9efad10, 'name': SearchDatastore_Task, 'duration_secs': 0.012284} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.696568] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.696568] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.696767] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.698029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.698029] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.698029] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a54a72bf-0dd7-445a-84fd-820e87efc0e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.708692] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.708891] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.709629] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f88e441-cd94-49b9-96fc-8ab77c1f34ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.715150] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1017.715150] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b74af-e6f5-35a0-daa4-e413259d4d96" [ 1017.715150] env[68674]: _type = "Task" [ 1017.715150] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.726836] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b74af-e6f5-35a0-daa4-e413259d4d96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.824073] env[68674]: DEBUG nova.network.neutron [-] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.869300] env[68674]: DEBUG nova.network.neutron [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Updating instance_info_cache with network_info: [{"id": "1e919367-5786-4b72-b1e9-31aed453f5fc", "address": "fa:16:3e:42:df:89", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e919367-57", "ovs_interfaceid": "1e919367-5786-4b72-b1e9-31aed453f5fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.920507] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b491be4b-05a0-4e4e-a728-bfb269d0f699 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.931481] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ffd3ef79-01c9-4de1-8df8-95d8aebef559 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.965725] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2654f28c-9977-4f70-bdfe-3b4b39ad9294 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.974324] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86663c0-e125-445b-8c7a-a0fed66d2748 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.989498] env[68674]: DEBUG nova.compute.provider_tree [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.053354] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240618, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.226264] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528b74af-e6f5-35a0-daa4-e413259d4d96, 'name': SearchDatastore_Task, 'duration_secs': 0.012457} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.227107] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-065fe1cb-79d3-4e9f-bdfd-d4c7cdfab4f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.233434] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1018.233434] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52047f0a-e6cf-acb9-c013-6ed16a3bb708" [ 1018.233434] env[68674]: _type = "Task" [ 1018.233434] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.243187] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52047f0a-e6cf-acb9-c013-6ed16a3bb708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.328257] env[68674]: INFO nova.compute.manager [-] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Took 1.32 seconds to deallocate network for instance. 
[ 1018.374965] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "refresh_cache-79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.374965] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Instance network_info: |[{"id": "1e919367-5786-4b72-b1e9-31aed453f5fc", "address": "fa:16:3e:42:df:89", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e919367-57", "ovs_interfaceid": "1e919367-5786-4b72-b1e9-31aed453f5fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1018.375156] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:df:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e919367-5786-4b72-b1e9-31aed453f5fc', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.383543] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1018.383996] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.384271] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c2bf622-fd7b-4efb-996d-973e3a476902 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.405596] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.405596] env[68674]: value = "task-3240619" [ 1018.405596] env[68674]: _type = "Task" [ 1018.405596] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.414195] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240619, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.493077] env[68674]: DEBUG nova.scheduler.client.report [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.533218] env[68674]: DEBUG nova.compute.manager [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Received event network-changed-1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1018.533218] env[68674]: DEBUG nova.compute.manager [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Refreshing instance network info cache due to event network-changed-1e919367-5786-4b72-b1e9-31aed453f5fc. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1018.533218] env[68674]: DEBUG oslo_concurrency.lockutils [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] Acquiring lock "refresh_cache-79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.533218] env[68674]: DEBUG oslo_concurrency.lockutils [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] Acquired lock "refresh_cache-79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.533739] env[68674]: DEBUG nova.network.neutron [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Refreshing network info cache for port 1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.556516] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240618, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.614211] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1018.647736] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1018.648189] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.648414] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1018.648647] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.648824] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1018.648980] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1018.649214] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1018.649373] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1018.649538] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1018.649707] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1018.649880] env[68674]: DEBUG nova.virt.hardware [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1018.650775] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20de3f8-ab8e-4a31-92fa-a8ae48c0ca64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.662153] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39001151-96a0-4ec1-9d3b-f3eda69d1666 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.745422] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': 
session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52047f0a-e6cf-acb9-c013-6ed16a3bb708, 'name': SearchDatastore_Task, 'duration_secs': 0.010579} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.746657] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.746657] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.746957] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48b316f3-7f0b-492f-b9e2-1c9f76dd76ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.754799] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1018.754799] env[68674]: value = "task-3240620" [ 1018.754799] env[68674]: _type = "Task" [ 1018.754799] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.763781] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.834871] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.915960] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240619, 'name': CreateVM_Task, 'duration_secs': 0.357775} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.916788] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1018.917620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.917812] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.918196] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1018.918472] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571ddddb-a744-4727-9343-801de950ef07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.924378] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1018.924378] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba8257-727a-1804-1e0c-1b8b5292b63f" [ 1018.924378] env[68674]: _type = "Task" [ 1018.924378] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.933719] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba8257-727a-1804-1e0c-1b8b5292b63f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.999122] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.999886] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1019.003319] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.098s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.003472] env[68674]: DEBUG nova.objects.instance [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lazy-loading 'resources' on Instance uuid 7a13c52a-328a-4baa-827f-4f2e9cd29269 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.056898] env[68674]: DEBUG oslo_vmware.api [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240618, 'name': PowerOnVM_Task, 'duration_secs': 1.135341} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.057194] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1019.057382] env[68674]: INFO nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Took 10.19 seconds to spawn the instance on the hypervisor. [ 1019.057607] env[68674]: DEBUG nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1019.058371] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659a2c7c-8542-4049-a726-42c23d29176d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.268848] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240620, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.324256] env[68674]: DEBUG nova.network.neutron [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Successfully updated port: 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1019.374869] env[68674]: DEBUG nova.network.neutron [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Updated VIF entry in instance network info cache for port 1e919367-5786-4b72-b1e9-31aed453f5fc. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.375271] env[68674]: DEBUG nova.network.neutron [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Updating instance_info_cache with network_info: [{"id": "1e919367-5786-4b72-b1e9-31aed453f5fc", "address": "fa:16:3e:42:df:89", "network": {"id": "d412f884-932c-461f-8f04-990897b04532", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-692483706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6b179855b874365964446f95f9f5a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e919367-57", "ovs_interfaceid": "1e919367-5786-4b72-b1e9-31aed453f5fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.435499] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ba8257-727a-1804-1e0c-1b8b5292b63f, 'name': SearchDatastore_Task, 'duration_secs': 0.01172} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.435743] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.436545] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.436545] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.436545] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.436691] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.436886] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48b5d019-69ae-4517-a233-4989780d5e06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.451079] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.451393] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.452627] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf5b745-7717-4201-b486-cf0fbc62809f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.458737] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1019.458737] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5238dc23-cd0d-ee35-a8ea-89c6c6b03e68" [ 1019.458737] env[68674]: _type = "Task" [ 1019.458737] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.467324] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5238dc23-cd0d-ee35-a8ea-89c6c6b03e68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.507964] env[68674]: DEBUG nova.compute.utils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1019.511625] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1019.511625] env[68674]: DEBUG nova.network.neutron [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1019.569432] env[68674]: DEBUG nova.policy [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4306c590bce41dfb83eb474079deee4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bc5a5f88cdd441fbb0df17cab2fcecc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1019.579986] env[68674]: INFO nova.compute.manager [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Took 23.39 seconds to build instance. 
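Annotation: the surrounding DEBUG entries (SearchDatastore_Task, CopyVirtualDisk_Task task-3240620, CreateVM_Task task-3240619) all follow the same oslo_vmware task-polling pattern recorded by wait_for_task/_poll_task in api.py above: invoke a vCenter task, then repeatedly poll its state, logging "progress is N%" until it reports "completed successfully" with a duration_secs. The following is a minimal, self-contained sketch of that loop, not the real oslo.vmware API; fetch_task_info and the shape of its return value are hypothetical stand-ins for a vCenter TaskInfo lookup.

import time

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state (sketch only)."""
    while True:
        info = fetch_task_info(task_id)            # hypothetical TaskInfo lookup, one round-trip
        if info["state"] == "success":
            return info                            # caller can read e.g. info.get("duration_secs")
        if info["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        # still queued/running: report progress the way the log entries above do
        print(f"Task {task_id} ({info['name']}) progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

In the real driver the equivalent is the session-level wait_for_task call wrapped around VirtualDiskManager/VirtualMachine task references, as the wait_for_task (api.py:397) and _poll_task (api.py:434/444) source locations in these entries indicate.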
[ 1019.767928] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.692463} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.768246] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.768445] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.768699] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8e78238-7798-47c2-8d20-2db3421bb6ac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.776340] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1019.776340] env[68674]: value = "task-3240621" [ 1019.776340] env[68674]: _type = "Task" [ 1019.776340] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.788410] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240621, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.793915] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fec030-f72a-4138-b432-e14cac4f10e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.802064] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf6a4ba-1e37-40b1-8a5e-46c63d27d0d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.833453] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.833618] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.833774] env[68674]: DEBUG nova.network.neutron [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.835636] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d66f44-146b-409a-8717-f9b689e1c38c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.848431] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41aec076-4611-49af-8386-c9cf6172260c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.862374] env[68674]: DEBUG nova.compute.provider_tree [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.880472] env[68674]: DEBUG oslo_concurrency.lockutils [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] Releasing lock "refresh_cache-79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.880740] env[68674]: DEBUG nova.compute.manager [req-35cfaa06-57b2-4bcc-b96f-9f0198c0e9e5 req-054a9ac5-4b5d-4f0f-8790-d47a894e7674 service nova] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Received event network-vif-deleted-c9e9cacf-59be-4854-a352-111921b372e2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.897276] env[68674]: DEBUG nova.network.neutron [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 
tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Successfully created port: 4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1019.970161] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5238dc23-cd0d-ee35-a8ea-89c6c6b03e68, 'name': SearchDatastore_Task, 'duration_secs': 0.031464} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.971023] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a07dd217-ba17-4f86-bcbe-6ba8bea56db0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.976971] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1019.976971] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522ce98d-68e3-3fb0-5024-6d7486ba5174" [ 1019.976971] env[68674]: _type = "Task" [ 1019.976971] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.985316] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522ce98d-68e3-3fb0-5024-6d7486ba5174, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.011594] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1020.082013] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f74584d5-4625-4e3a-a56e-e063c226f484 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "182deaf0-c20a-4041-8f41-81786d6b053e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.905s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.288622] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072936} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.288879] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.289725] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e1f089-9090-4a42-8f91-4ed451482862 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.313674] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.314658] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55125117-599e-4ea5-a4aa-bc42f6601c0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.341007] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1020.341007] env[68674]: value = "task-3240622" [ 1020.341007] env[68674]: _type = "Task" [ 1020.341007] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.351593] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240622, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.364018] env[68674]: DEBUG nova.scheduler.client.report [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.401907] env[68674]: DEBUG nova.network.neutron [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.489073] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522ce98d-68e3-3fb0-5024-6d7486ba5174, 'name': SearchDatastore_Task, 'duration_secs': 0.056711} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.489359] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.489660] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031/79ee95b6-7321-4e33-a0e4-2c8ed1bc1031.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.489958] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66766f67-344f-42b5-921c-78e6281f7a78 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.499720] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1020.499720] env[68674]: value = "task-3240623" [ 1020.499720] env[68674]: _type = "Task" [ 1020.499720] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.512338] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240623, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.587357] env[68674]: DEBUG nova.compute.manager [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-vif-plugged-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1020.587606] env[68674]: DEBUG oslo_concurrency.lockutils [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.588556] env[68674]: DEBUG oslo_concurrency.lockutils [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.588556] env[68674]: DEBUG oslo_concurrency.lockutils [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.588556] env[68674]: DEBUG nova.compute.manager [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] No waiting events found dispatching network-vif-plugged-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1020.588781] env[68674]: WARNING nova.compute.manager [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received unexpected event network-vif-plugged-270836ed-f229-45ed-b23b-58f26fa997be for instance with vm_state building and task_state spawning. [ 1020.588781] env[68674]: DEBUG nova.compute.manager [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-changed-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1020.588962] env[68674]: DEBUG nova.compute.manager [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing instance network info cache due to event network-changed-270836ed-f229-45ed-b23b-58f26fa997be. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1020.589166] env[68674]: DEBUG oslo_concurrency.lockutils [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.641701] env[68674]: DEBUG nova.network.neutron [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.853878] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240622, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.870934] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.868s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.873545] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.699s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.873718] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.875665] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.620s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.875974] env[68674]: DEBUG nova.objects.instance [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lazy-loading 'resources' on Instance uuid a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.901898] env[68674]: INFO nova.scheduler.client.report [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocations for instance ffdd1c62-1b4e-40cf-a27e-ff2877439701 [ 1020.903080] env[68674]: INFO nova.scheduler.client.report [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Deleted allocations for instance 7a13c52a-328a-4baa-827f-4f2e9cd29269 [ 1021.013230] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240623, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.022984] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1021.053320] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.053731] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.053959] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.054518] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.054518] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.054693] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.055023] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.055251] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.055443] env[68674]: DEBUG nova.virt.hardware [None 
req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.055630] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.055832] env[68674]: DEBUG nova.virt.hardware [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.056828] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f34cfc-4ad1-4866-a8ca-375a0c7f2e8c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.069872] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7982f2-80ad-4618-9bff-2182a4b374dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.144342] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.144772] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Instance network_info: |[{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1021.145405] env[68674]: DEBUG oslo_concurrency.lockutils [req-be7a8722-0dda-41b9-b212-bfc69030a88d 
req-422e133a-8d3c-4288-a810-54af0981c102 service nova] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.145405] env[68674]: DEBUG nova.network.neutron [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1021.146705] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:c3:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4fe416-47a6-4542-b59d-8c71ab4d6503', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '270836ed-f229-45ed-b23b-58f26fa997be', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.156547] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.159983] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.160597] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cddc2fc-1ea9-405b-8907-fa8a320c1ab0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.183789] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.183789] env[68674]: value = "task-3240624" [ 1021.183789] env[68674]: _type = "Task" [ 1021.183789] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.192897] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240624, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.355020] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240622, 'name': ReconfigVM_Task, 'duration_secs': 0.549523} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.355020] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Reconfigured VM instance instance-0000005f to attach disk [datastore2] cab97ca7-968b-4d40-bb1f-2244469e1b56/cab97ca7-968b-4d40-bb1f-2244469e1b56.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.355020] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01223c39-5ff6-4583-b735-2a05828b3ebb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.362665] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1021.362665] env[68674]: value = "task-3240625" [ 1021.362665] env[68674]: _type = "Task" [ 1021.362665] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.375456] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240625, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.413463] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8ffb2324-d766-4c78-bb81-70c900b73d95 tempest-ServerDiagnosticsNegativeTest-92297839 tempest-ServerDiagnosticsNegativeTest-92297839-project-member] Lock "7a13c52a-328a-4baa-827f-4f2e9cd29269" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.092s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.417631] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d9e530f6-87ce-409b-9d98-50898ab84a46 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "ffdd1c62-1b4e-40cf-a27e-ff2877439701" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.754s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.516538] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240623, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699919} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.517013] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031/79ee95b6-7321-4e33-a0e4-2c8ed1bc1031.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1021.517257] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1021.517653] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87bc743a-2836-49d7-8778-959344307991 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.531311] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1021.531311] env[68674]: value = "task-3240626" [ 1021.531311] env[68674]: _type = "Task" [ 1021.531311] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.545460] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.698029] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240624, 'name': CreateVM_Task, 'duration_secs': 0.385319} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.698287] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.699130] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.699380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.700070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1021.700887] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e3892e-4e27-498b-950b-9ea2d5958658 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.706561] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1021.706561] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525e2493-b6f4-2328-e9f7-32e3e593b974" [ 1021.706561] env[68674]: _type = "Task" [ 1021.706561] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.715604] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525e2493-b6f4-2328-e9f7-32e3e593b974, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.791854] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d43b05a-4817-4b45-9f62-f82c101258a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.801763] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec68836-2227-4092-bafa-512e5f34b005 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.836084] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dfb740-925d-49bc-9f87-4764a4e5d099 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.850909] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45922d3-1e9b-42aa-bb06-bac8e9c71818 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.869980] env[68674]: DEBUG nova.compute.provider_tree [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.879980] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240625, 'name': Rename_Task, 'duration_secs': 0.194697} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.881438] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.881438] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a4ed918-4f56-4511-a5fd-11ebc4c5b0eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.889941] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1021.889941] env[68674]: value = "task-3240627" [ 1021.889941] env[68674]: _type = "Task" [ 1021.889941] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.901031] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240627, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.903505] env[68674]: DEBUG nova.network.neutron [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updated VIF entry in instance network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1021.903505] env[68674]: DEBUG nova.network.neutron [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.028670] env[68674]: DEBUG nova.network.neutron [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Successfully updated port: 4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1022.041764] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075095} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.042388] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1022.043053] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2280f4e0-5e64-4dc8-a171-b2ab3f0f2698 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.066905] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031/79ee95b6-7321-4e33-a0e4-2c8ed1bc1031.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.067495] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bde45bb6-f122-4a69-9f90-e193ebe35ba5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.088618] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1022.088618] env[68674]: value = "task-3240628" [ 1022.088618] env[68674]: _type = "Task" [ 1022.088618] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.097541] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240628, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.224612] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525e2493-b6f4-2328-e9f7-32e3e593b974, 'name': SearchDatastore_Task, 'duration_secs': 0.010163} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.225130] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.225338] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.225615] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.225785] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.226109] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.226478] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e36dec96-1134-4873-90b4-d61cf22ee4ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.238918] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.239245] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.240368] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23930ddf-d1c0-4fe7-83dd-f952dcb2e034 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.247149] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1022.247149] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b60063-65b7-1865-3e3e-4292abc2fb8a" [ 1022.247149] env[68674]: _type = "Task" [ 1022.247149] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.256123] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b60063-65b7-1865-3e3e-4292abc2fb8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.375577] env[68674]: DEBUG nova.scheduler.client.report [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.401284] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240627, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.404956] env[68674]: DEBUG oslo_concurrency.lockutils [req-be7a8722-0dda-41b9-b212-bfc69030a88d req-422e133a-8d3c-4288-a810-54af0981c102 service nova] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.535699] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.535861] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.536032] env[68674]: DEBUG nova.network.neutron [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.599251] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240628, 'name': ReconfigVM_Task, 'duration_secs': 0.306599} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.599568] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031/79ee95b6-7321-4e33-a0e4-2c8ed1bc1031.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1022.600243] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6576452-a173-4ab4-8e0f-d206481c547f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.609739] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1022.609739] env[68674]: value = "task-3240629" [ 1022.609739] env[68674]: _type = "Task" [ 1022.609739] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.619065] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240629, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.656321] env[68674]: DEBUG nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Received event network-changed-4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.656321] env[68674]: DEBUG nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Refreshing instance network info cache due to event network-changed-4227b11c-e4da-42b5-80c0-af15c0b4de7f. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1022.656502] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Acquiring lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.656646] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Acquired lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.656814] env[68674]: DEBUG nova.network.neutron [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Refreshing network info cache for port 4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.758457] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b60063-65b7-1865-3e3e-4292abc2fb8a, 'name': SearchDatastore_Task, 'duration_secs': 0.020206} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.759470] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae79c19-06a0-40df-b684-9ee02eeefb5b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.766069] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1022.766069] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f16b27-a3f7-5b98-549f-70543d605f77" [ 1022.766069] env[68674]: _type = "Task" [ 1022.766069] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.776511] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f16b27-a3f7-5b98-549f-70543d605f77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.882830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.884195] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.500s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.885822] env[68674]: INFO nova.compute.claims [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.901182] env[68674]: DEBUG oslo_vmware.api [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240627, 'name': PowerOnVM_Task, 'duration_secs': 0.909189} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.901597] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.901697] env[68674]: DEBUG nova.compute.manager [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.902536] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdfb251-3efc-4436-84a4-e59b29adfa7f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.914907] env[68674]: INFO nova.scheduler.client.report [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Deleted allocations for instance a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6 [ 1023.067109] env[68674]: DEBUG nova.network.neutron [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1023.120295] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240629, 'name': Rename_Task, 'duration_secs': 0.155727} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.120709] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.121075] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2cdd6df-a947-413c-aeab-fe73c779ae33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.131960] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1023.131960] env[68674]: value = "task-3240630" [ 1023.131960] env[68674]: _type = "Task" [ 1023.131960] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.140503] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240630, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.252553] env[68674]: DEBUG nova.network.neutron [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating instance_info_cache with network_info: [{"id": "4d94c698-e74c-4238-8f2e-ead75015687e", "address": "fa:16:3e:2c:a1:73", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94c698-e7", "ovs_interfaceid": "4d94c698-e74c-4238-8f2e-ead75015687e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.277553] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f16b27-a3f7-5b98-549f-70543d605f77, 'name': SearchDatastore_Task, 'duration_secs': 0.012079} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.277817] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.278256] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 66f4ab32-ef66-4d1d-93b6-775d59ce3c41/66f4ab32-ef66-4d1d-93b6-775d59ce3c41.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.278518] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07aad159-21d3-4fe8-b9cb-e7fa30df8f99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.286258] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1023.286258] env[68674]: value = "task-3240631" [ 1023.286258] env[68674]: _type = "Task" [ 1023.286258] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.294671] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.423135] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.423660] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9bb376f2-4499-45d7-b714-96c00ee536fd tempest-ImagesOneServerTestJSON-2019475404 tempest-ImagesOneServerTestJSON-2019475404-project-member] Lock "a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.882s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.597158] env[68674]: DEBUG nova.network.neutron [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updated VIF entry in instance network info cache for port 4227b11c-e4da-42b5-80c0-af15c0b4de7f. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.598579] env[68674]: DEBUG nova.network.neutron [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updating instance_info_cache with network_info: [{"id": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "address": "fa:16:3e:e9:a4:ab", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4227b11c-e4", "ovs_interfaceid": "4227b11c-e4da-42b5-80c0-af15c0b4de7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.648460] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240630, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.758136] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.758136] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Instance network_info: |[{"id": "4d94c698-e74c-4238-8f2e-ead75015687e", "address": "fa:16:3e:2c:a1:73", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94c698-e7", "ovs_interfaceid": "4d94c698-e74c-4238-8f2e-ead75015687e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1023.758136] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:a1:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d94c698-e74c-4238-8f2e-ead75015687e', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.766254] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Creating folder: Project (9bc5a5f88cdd441fbb0df17cab2fcecc). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1023.766254] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0b46f32-7a8d-495e-aa95-091191c841d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.780173] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Created folder: Project (9bc5a5f88cdd441fbb0df17cab2fcecc) in parent group-v647377. [ 1023.780386] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Creating folder: Instances. Parent ref: group-v647659. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1023.780704] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5f31720-5842-46d9-b890-711a63fecdea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.792096] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Created folder: Instances in parent group-v647659. [ 1023.792386] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1023.793034] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.793276] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75908301-0ef0-4f79-861d-fe6eb483dd91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.821407] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494663} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.822195] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 66f4ab32-ef66-4d1d-93b6-775d59ce3c41/66f4ab32-ef66-4d1d-93b6-775d59ce3c41.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.822422] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.822698] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3420f8d9-71dd-4117-b9e5-119525b86047 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.830333] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.830333] env[68674]: value = "task-3240634" [ 1023.830333] env[68674]: _type = "Task" [ 1023.830333] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.835740] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1023.835740] env[68674]: value = "task-3240635" [ 1023.835740] env[68674]: _type = "Task" [ 1023.835740] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.840204] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240634, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.848836] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240635, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.102252] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Releasing lock "refresh_cache-182deaf0-c20a-4041-8f41-81786d6b053e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.102531] env[68674]: DEBUG nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Received event network-vif-plugged-4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1024.102732] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.102935] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.103108] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.103319] env[68674]: DEBUG nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] No waiting events found dispatching network-vif-plugged-4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1024.103431] env[68674]: WARNING nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Received unexpected event network-vif-plugged-4d94c698-e74c-4238-8f2e-ead75015687e for instance with vm_state building and task_state spawning. [ 1024.103670] env[68674]: DEBUG nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Received event network-changed-4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1024.103829] env[68674]: DEBUG nova.compute.manager [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Refreshing instance network info cache due to event network-changed-4d94c698-e74c-4238-8f2e-ead75015687e. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1024.104017] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Acquiring lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.104179] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Acquired lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.104438] env[68674]: DEBUG nova.network.neutron [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Refreshing network info cache for port 4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.151406] env[68674]: DEBUG oslo_vmware.api [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240630, 'name': PowerOnVM_Task, 'duration_secs': 0.961263} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.151740] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1024.151993] env[68674]: INFO nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Took 7.99 seconds to spawn the instance on the hypervisor. 
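Note: the spawn sequence logged above for instance 79ee95b6 (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follows the same oslo.vmware call-and-wait pattern: a *_Task method is invoked through the API session, and the session then polls the returned Task object (the repeated "progress is N%" entries) until it completes. A minimal sketch of that pattern is below; it assumes the oslo.vmware session API (invoke_api / wait_for_task), the connection values are placeholders, and vm_ref stands in for a VirtualMachine managed-object reference obtained elsewhere — none of these values are taken from this deployment.

```python
# Sketch only: the invoke-then-wait pattern behind the *_Task entries above.
from oslo_vmware import api


def power_on(session, vm_ref):
    # Start the asynchronous vSphere operation; the call returns a Task moref.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Block until the task finishes, polling at task_poll_interval
    # (the "progress is N%" lines); an oslo.vmware exception is raised
    # if the task ends in error.
    session.wait_for_task(task)


# Placeholder credentials/host; creating the session logs in to vCenter.
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
```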
[ 1024.152473] env[68674]: DEBUG nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.153809] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0175a1-a43a-4bce-8295-38f020d9d72f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.225879] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69d5e25-3cef-4b4f-b074-7623993c824e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.234940] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7739734c-f699-4130-9902-d4584507aa43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.267324] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52070805-2073-4bc1-9280-abf324e35a66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.276820] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ce1e4b-148b-433f-9d95-e429d59145d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.294173] env[68674]: DEBUG nova.compute.provider_tree [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.342629] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240634, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.351322] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079037} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.352090] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1024.352816] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f559a25-d93b-4a91-95a6-05e63152351d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.384304] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 66f4ab32-ef66-4d1d-93b6-775d59ce3c41/66f4ab32-ef66-4d1d-93b6-775d59ce3c41.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.385107] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecd7d61a-9d0b-454f-93d4-41d915f2b189 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.409220] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1024.409220] env[68674]: value = "task-3240636" [ 1024.409220] env[68674]: _type = "Task" [ 1024.409220] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.415787] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240636, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.435503] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "cab97ca7-968b-4d40-bb1f-2244469e1b56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.435840] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "cab97ca7-968b-4d40-bb1f-2244469e1b56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.436066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "cab97ca7-968b-4d40-bb1f-2244469e1b56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.436252] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "cab97ca7-968b-4d40-bb1f-2244469e1b56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.436417] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "cab97ca7-968b-4d40-bb1f-2244469e1b56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.439260] env[68674]: INFO nova.compute.manager [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Terminating instance [ 1024.678970] env[68674]: INFO nova.compute.manager [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Took 23.11 seconds to build instance. 
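Note: the "Acquiring lock ... / Lock ... acquired ... waited Ns / ... released ... held Ns" triples throughout this section (the "compute_resources" and per-instance "refresh_cache-*" locks) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two ways that primitive is typically used is below; the lock names and function are illustrative only, not Nova's actual call sites.

```python
# Sketch only, assuming oslo.concurrency; lock names are illustrative.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources():
    # Only one thread of this process enters at a time; the decorator's
    # wrapper logs the "acquired ... waited Ns" / "released ... held Ns"
    # DEBUG messages seen above.
    pass


# The same primitive as a context manager, comparable to the per-instance
# "refresh_cache-<uuid>" lock entries in this section.
with lockutils.lock('refresh_cache-INSTANCE_UUID'):
    pass  # work on the instance's network info cache under the lock
```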
[ 1024.797954] env[68674]: DEBUG nova.scheduler.client.report [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.842335] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240634, 'name': CreateVM_Task, 'duration_secs': 0.647867} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.842438] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.843110] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.843276] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.843643] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1024.843910] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e6fead8-046b-419f-ad2a-c68f0b13fc10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.850273] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1024.850273] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52471797-d32b-5002-b19e-0891d48d77b2" [ 1024.850273] env[68674]: _type = "Task" [ 1024.850273] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.858363] env[68674]: DEBUG nova.network.neutron [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updated VIF entry in instance network info cache for port 4d94c698-e74c-4238-8f2e-ead75015687e. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1024.858808] env[68674]: DEBUG nova.network.neutron [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating instance_info_cache with network_info: [{"id": "4d94c698-e74c-4238-8f2e-ead75015687e", "address": "fa:16:3e:2c:a1:73", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94c698-e7", "ovs_interfaceid": "4d94c698-e74c-4238-8f2e-ead75015687e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.863395] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52471797-d32b-5002-b19e-0891d48d77b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.917388] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240636, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.944782] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "refresh_cache-cab97ca7-968b-4d40-bb1f-2244469e1b56" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.945059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquired lock "refresh_cache-cab97ca7-968b-4d40-bb1f-2244469e1b56" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.945322] env[68674]: DEBUG nova.network.neutron [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.180978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4abda4a9-b8bf-410f-8a88-c22525809b5f tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.630s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.305176] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.305705] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.309628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.898s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.311122] env[68674]: INFO nova.compute.claims [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.364766] env[68674]: DEBUG oslo_concurrency.lockutils [req-67b78b38-d1a2-48a9-addf-d9ca7ecf23dc req-d788d4d4-b13b-4f29-b66e-97c451af1fe9 service nova] Releasing lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.365545] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52471797-d32b-5002-b19e-0891d48d77b2, 'name': SearchDatastore_Task, 'duration_secs': 0.012492} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.365545] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.365909] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1025.365987] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.366116] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.366304] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1025.366886] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2da62aff-1bfe-4fc8-8eca-d2abc4e5f512 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.377773] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1025.377973] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1025.378740] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-022b40e9-6d52-41bc-86f7-2fff1c62fb60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.385715] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1025.385715] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f996d-87fb-b58f-3661-4570a9281040" [ 1025.385715] env[68674]: _type = "Task" [ 1025.385715] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.395216] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f996d-87fb-b58f-3661-4570a9281040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.417584] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240636, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.469079] env[68674]: DEBUG nova.network.neutron [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.561125] env[68674]: DEBUG nova.network.neutron [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.811283] env[68674]: DEBUG nova.compute.utils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.813639] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1025.813871] env[68674]: DEBUG nova.network.neutron [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1025.869775] env[68674]: DEBUG nova.policy [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed36a9f9127b4bc7b619f45a45cf7401', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43f00e26b76347d0bd40df46ac3acbcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.898798] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523f996d-87fb-b58f-3661-4570a9281040, 'name': SearchDatastore_Task, 'duration_secs': 0.011869} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.899279] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a48e6e6b-54ea-42ef-ad5b-fc69b638e958 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.906540] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1025.906540] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52031e87-d1ac-da96-fa2f-49eb0cdd017f" [ 1025.906540] env[68674]: _type = "Task" [ 1025.906540] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.917381] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52031e87-d1ac-da96-fa2f-49eb0cdd017f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.920415] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240636, 'name': ReconfigVM_Task, 'duration_secs': 1.04805} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.921331] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 66f4ab32-ef66-4d1d-93b6-775d59ce3c41/66f4ab32-ef66-4d1d-93b6-775d59ce3c41.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.921331] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6adc515-6fcd-446c-a48a-076ac3a6d553 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.929792] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1025.929792] env[68674]: value = "task-3240637" [ 1025.929792] env[68674]: _type = "Task" [ 1025.929792] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.938943] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240637, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.064375] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Releasing lock "refresh_cache-cab97ca7-968b-4d40-bb1f-2244469e1b56" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.064938] env[68674]: DEBUG nova.compute.manager [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1026.065239] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.066786] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fb1503-4816-4f15-89bd-6ec59058eed1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.077659] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.077946] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-272fdad0-66c8-4120-8fe9-1f7a9d08ac71 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.086649] env[68674]: DEBUG oslo_vmware.api [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1026.086649] env[68674]: value = "task-3240638" [ 1026.086649] env[68674]: _type = "Task" [ 1026.086649] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.104616] env[68674]: DEBUG oslo_vmware.api [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240638, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.319558] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.421506] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52031e87-d1ac-da96-fa2f-49eb0cdd017f, 'name': SearchDatastore_Task, 'duration_secs': 0.011778} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.422256] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.422722] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a/142e8ede-90e2-47cf-a1b1-8c4fd59eed0a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1026.423195] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd69c49e-e47b-4377-a57e-4c0ca478575c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.429491] env[68674]: DEBUG nova.network.neutron [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Successfully created port: e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.438858] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1026.438858] env[68674]: value = "task-3240639" [ 1026.438858] env[68674]: _type = "Task" [ 1026.438858] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.446963] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240637, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.453826] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240639, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.602388] env[68674]: DEBUG oslo_vmware.api [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240638, 'name': PowerOffVM_Task, 'duration_secs': 0.239027} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.602773] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.603033] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.603451] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e35df30f-2ca0-45cf-a604-3e294d097b98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.605448] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.605686] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.605884] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.606362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.606362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.611568] env[68674]: INFO nova.compute.manager [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Terminating instance [ 1026.643096] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea06a5f-1fbb-4d56-a613-b6aad1b32670 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.648574] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.648779] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.648939] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Deleting the datastore file [datastore2] cab97ca7-968b-4d40-bb1f-2244469e1b56 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.649670] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73c87ec1-8ed6-424e-a097-81b229ebc52e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.655188] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda4269a-8970-4cea-a85d-31c2da204bc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.661561] env[68674]: DEBUG oslo_vmware.api [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for the task: (returnval){ [ 1026.661561] env[68674]: value = "task-3240641" [ 1026.661561] env[68674]: _type = "Task" [ 1026.661561] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.696979] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb32835-ce8c-4b33-9f15-cf5b49127336 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.700173] env[68674]: DEBUG oslo_vmware.api [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240641, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.707596] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878b351d-f089-4bbd-8f4f-af065289e404 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.726606] env[68674]: DEBUG nova.compute.provider_tree [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.942062] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240637, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.952867] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240639, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496358} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.953215] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a/142e8ede-90e2-47cf-a1b1-8c4fd59eed0a.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.953735] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.954946] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-600bc917-c43c-4ded-8ad1-e8e9c0a6468f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.964057] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1026.964057] env[68674]: value = "task-3240642" [ 1026.964057] env[68674]: _type = "Task" [ 1026.964057] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.975710] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240642, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.116444] env[68674]: DEBUG nova.compute.manager [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.116700] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.117979] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8d878f-a8ce-4f20-aa02-2f07a852e0fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.126211] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.126453] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-992d47e2-e65a-417a-b2d0-da2da6617d7f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.133649] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1027.133649] env[68674]: value = "task-3240643" [ 1027.133649] env[68674]: _type = "Task" [ 1027.133649] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.142478] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.172473] env[68674]: DEBUG oslo_vmware.api [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Task: {'id': task-3240641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337873} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.172726] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.172909] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.173098] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.173272] env[68674]: INFO nova.compute.manager [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1027.173530] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.173725] env[68674]: DEBUG nova.compute.manager [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1027.173820] env[68674]: DEBUG nova.network.neutron [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1027.200465] env[68674]: DEBUG nova.network.neutron [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1027.231147] env[68674]: DEBUG nova.scheduler.client.report [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.335284] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.364866] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.365280] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.365556] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.365975] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.366164] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.366432] env[68674]: DEBUG nova.virt.hardware [None 
req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.366791] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.367093] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.367394] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.367708] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.368028] env[68674]: DEBUG nova.virt.hardware [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.369411] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865eeeff-cf8a-4c2a-a09c-7a86069191e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.381323] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4051a927-b7c9-4bd4-aab9-2da1517b984e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.442155] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240637, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.476615] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088041} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.476872] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.477718] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9fa743-c99f-47b2-9bdb-33ae722fb1c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.498657] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.499447] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.512345] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a/142e8ede-90e2-47cf-a1b1-8c4fd59eed0a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.513517] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f952586f-b0c5-49cb-865b-23683c0f4eb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.545597] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1027.545597] env[68674]: value = "task-3240644" [ 1027.545597] env[68674]: _type = "Task" [ 1027.545597] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.555720] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240644, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.653902] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240643, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.702686] env[68674]: DEBUG nova.network.neutron [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.736455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.737066] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.741235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.905s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.741776] env[68674]: DEBUG nova.objects.instance [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'resources' on Instance uuid 5c12cb5d-821c-4e63-86a0-dadc9794a8ba {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.943693] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240637, 'name': Rename_Task, 'duration_secs': 1.658409} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.944149] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.944248] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02e4a447-c9a9-4471-9651-171c04c701dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.951451] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1027.951451] env[68674]: value = "task-3240645" [ 1027.951451] env[68674]: _type = "Task" [ 1027.951451] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.959309] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.014457] env[68674]: INFO nova.compute.manager [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Detaching volume 268d6a73-58d5-4541-bfb5-15e693956d5f [ 1028.065020] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240644, 'name': ReconfigVM_Task, 'duration_secs': 0.307921} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.065020] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a/142e8ede-90e2-47cf-a1b1-8c4fd59eed0a.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.065020] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6fa346c-1539-4756-bc61-0adfbf175a12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.069469] env[68674]: INFO nova.virt.block_device [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Attempting to driver detach volume 268d6a73-58d5-4541-bfb5-15e693956d5f from mountpoint /dev/sdb [ 1028.069822] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1028.070138] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647612', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'name': 'volume-268d6a73-58d5-4541-bfb5-15e693956d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '63d6c185-db2c-4ede-a716-9a0dd432ab1f', 'attached_at': '', 'detached_at': '', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'serial': '268d6a73-58d5-4541-bfb5-15e693956d5f'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1028.072020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4d6494-141a-495f-b4e3-311f190043d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.075381] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1028.075381] env[68674]: value = "task-3240646" [ 1028.075381] env[68674]: _type = "Task" [ 1028.075381] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.099029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d68e5a-c1d1-46f7-9e8d-170d40d38592 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.106688] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240646, 'name': Rename_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.112318] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97994d70-4497-4725-b4c9-3812fb98994e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.139435] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3ba66d-b480-4563-8d6b-d25d5bbe21f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.148559] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240643, 'name': PowerOffVM_Task, 'duration_secs': 0.594666} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.161183] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.161326] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.161716] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] The volume has not been displaced from its original location: [datastore1] volume-268d6a73-58d5-4541-bfb5-15e693956d5f/volume-268d6a73-58d5-4541-bfb5-15e693956d5f.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1028.167095] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfiguring VM instance instance-00000041 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1028.167445] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9e9c0fe-8415-4648-bc43-43cab0c6f579 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.169210] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f1b9092-da9d-4400-bd90-4adbf4b5cf20 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.188640] env[68674]: DEBUG oslo_vmware.api [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1028.188640] env[68674]: value = "task-3240648" [ 1028.188640] env[68674]: _type = "Task" [ 1028.188640] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.200304] env[68674]: DEBUG oslo_vmware.api [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240648, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.205314] env[68674]: INFO nova.compute.manager [-] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Took 1.03 seconds to deallocate network for instance. 
[ 1028.248042] env[68674]: DEBUG nova.compute.utils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.253421] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.257034] env[68674]: DEBUG nova.network.neutron [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.267509] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.267850] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.268173] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleting the datastore file [datastore2] 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.269623] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ded2dc6-d284-485a-bea5-d8e845f81a1c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.280031] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for the task: (returnval){ [ 1028.280031] env[68674]: value = "task-3240649" [ 1028.280031] env[68674]: _type = "Task" [ 1028.280031] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.291413] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240649, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.384620] env[68674]: DEBUG nova.policy [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.462805] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240645, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.587685] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240646, 'name': Rename_Task, 'duration_secs': 0.350154} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.587685] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.587943] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45524019-44d2-45fa-a59f-e10a8ac34174 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.599906] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1028.599906] env[68674]: value = "task-3240650" [ 1028.599906] env[68674]: _type = "Task" [ 1028.599906] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.613472] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240650, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.659586] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c9626a-96f9-43ef-8085-7ec2dc2f839e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.672186] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823bc62b-b330-4eac-a05f-e78b03102fc6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.714846] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.721955] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc76660d-9b76-40fe-b4da-91ae1d3e7de0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.725690] env[68674]: DEBUG nova.compute.manager [req-db198bd7-8eea-4b44-8e04-eb5b8142a17a req-4b0a84c5-196e-484c-a1de-0358f2cbbcbc service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Received event network-vif-plugged-e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1028.726101] env[68674]: DEBUG oslo_concurrency.lockutils [req-db198bd7-8eea-4b44-8e04-eb5b8142a17a req-4b0a84c5-196e-484c-a1de-0358f2cbbcbc service nova] Acquiring lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.726163] env[68674]: DEBUG oslo_concurrency.lockutils [req-db198bd7-8eea-4b44-8e04-eb5b8142a17a req-4b0a84c5-196e-484c-a1de-0358f2cbbcbc service nova] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.729515] env[68674]: DEBUG oslo_concurrency.lockutils [req-db198bd7-8eea-4b44-8e04-eb5b8142a17a req-4b0a84c5-196e-484c-a1de-0358f2cbbcbc service nova] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.729515] env[68674]: DEBUG nova.compute.manager [req-db198bd7-8eea-4b44-8e04-eb5b8142a17a req-4b0a84c5-196e-484c-a1de-0358f2cbbcbc service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] No waiting events found dispatching network-vif-plugged-e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.729515] env[68674]: WARNING nova.compute.manager [req-db198bd7-8eea-4b44-8e04-eb5b8142a17a req-4b0a84c5-196e-484c-a1de-0358f2cbbcbc service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Received unexpected event 
network-vif-plugged-e9c61ddf-0f69-4349-8c95-a0246ea52982 for instance with vm_state building and task_state spawning. [ 1028.737106] env[68674]: DEBUG oslo_vmware.api [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240648, 'name': ReconfigVM_Task, 'duration_secs': 0.280111} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.737372] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Reconfigured VM instance instance-00000041 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1028.743309] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d030c1-e4e7-44b4-80e8-9f567f0eb019 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.747569] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65a3ebff-1d7b-45c1-bdba-7dae2a328ef7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.758699] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.776195] env[68674]: DEBUG nova.compute.provider_tree [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.782024] env[68674]: DEBUG oslo_vmware.api [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1028.782024] env[68674]: value = "task-3240651" [ 1028.782024] env[68674]: _type = "Task" [ 1028.782024] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.793291] env[68674]: DEBUG oslo_vmware.api [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240651, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.796064] env[68674]: DEBUG oslo_vmware.api [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Task: {'id': task-3240649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239793} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.796335] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.796554] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.796716] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.797079] env[68674]: INFO nova.compute.manager [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1028.797167] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.798066] env[68674]: DEBUG nova.compute.manager [-] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1028.798066] env[68674]: DEBUG nova.network.neutron [-] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1028.964958] env[68674]: DEBUG oslo_vmware.api [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240645, 'name': PowerOnVM_Task, 'duration_secs': 0.567395} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.965416] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.965754] env[68674]: INFO nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Took 10.35 seconds to spawn the instance on the hypervisor. [ 1028.966100] env[68674]: DEBUG nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.967337] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd948414-f066-47c0-a98b-7b23507617f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.000560] env[68674]: DEBUG nova.network.neutron [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Successfully updated port: e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1029.114444] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240650, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.166780] env[68674]: DEBUG nova.network.neutron [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Successfully created port: cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.263807] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.263961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.296208] env[68674]: DEBUG oslo_vmware.api [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240651, 'name': ReconfigVM_Task, 'duration_secs': 0.202444} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.296734] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647612', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'name': 'volume-268d6a73-58d5-4541-bfb5-15e693956d5f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '63d6c185-db2c-4ede-a716-9a0dd432ab1f', 'attached_at': '', 'detached_at': '', 'volume_id': '268d6a73-58d5-4541-bfb5-15e693956d5f', 'serial': '268d6a73-58d5-4541-bfb5-15e693956d5f'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1029.304736] env[68674]: ERROR nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [req-7b7d5561-0376-4a22-837c-11e058af6ba7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b7d5561-0376-4a22-837c-11e058af6ba7"}]} [ 1029.332093] env[68674]: DEBUG nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1029.353494] env[68674]: DEBUG nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1029.353765] env[68674]: DEBUG nova.compute.provider_tree [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1029.368978] env[68674]: DEBUG nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1029.369206] env[68674]: DEBUG nova.compute.provider_tree [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 140 to 141 during operation: update_aggregates {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1029.387234] env[68674]: DEBUG nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1029.491662] env[68674]: INFO nova.compute.manager [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Took 27.67 seconds to build instance. [ 1029.503381] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.504993] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.504993] env[68674]: DEBUG nova.network.neutron [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1029.616962] env[68674]: DEBUG oslo_vmware.api [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240650, 'name': PowerOnVM_Task, 'duration_secs': 0.608744} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.617443] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.617505] env[68674]: INFO nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Took 8.59 seconds to spawn the instance on the hypervisor. 
[ 1029.617678] env[68674]: DEBUG nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.618877] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc20527-9b26-4bd5-9406-b0a0eff5049e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.679723] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a68453-b14f-4bfd-9ef9-010b5c4d64e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.689577] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd0197a-6f84-4843-bc79-85a76d0e126a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.724664] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6825620-e67f-44e6-8315-d81c1f1dee6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.732856] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78a97fc-040c-4932-92d7-8b858063eab9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.747938] env[68674]: DEBUG nova.compute.provider_tree [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1029.771387] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.773208] env[68674]: DEBUG nova.compute.utils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1029.799156] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.799584] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.808069] env[68674]: DEBUG nova.network.neutron [-] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.811636] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.811895] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.812068] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.812259] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.812407] env[68674]: 
DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.812554] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.812764] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.812922] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.813336] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.813594] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.813794] env[68674]: DEBUG nova.virt.hardware [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.814680] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4716d9-1e06-4668-8d4c-2dccbb30112b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.825374] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53a3c90-a4d3-4d01-b2b7-e7fad757b439 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.854460] env[68674]: DEBUG nova.objects.instance [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'flavor' on Instance uuid 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.993211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-293a50b0-4840-401f-9d4e-5d697f8f5e38 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.181s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.060516] env[68674]: DEBUG nova.network.neutron [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1030.140808] env[68674]: INFO nova.compute.manager [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Took 27.34 seconds to build instance. [ 1030.276482] env[68674]: DEBUG nova.network.neutron [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Updating instance_info_cache with network_info: [{"id": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "address": "fa:16:3e:2d:77:7a", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c61ddf-0f", "ovs_interfaceid": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.276482] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.291065] env[68674]: DEBUG nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 141 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1030.291065] env[68674]: DEBUG nova.compute.provider_tree [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 141 to 142 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1030.291065] env[68674]: DEBUG nova.compute.provider_tree [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1030.301218] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1030.309990] env[68674]: INFO nova.compute.manager [-] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Took 1.51 seconds to deallocate network for instance. 
[ 1030.643675] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6c4ee8c3-1428-4b6a-b20c-979a33adf960 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.852s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.772200] env[68674]: DEBUG nova.network.neutron [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Successfully updated port: cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.784198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.784198] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Instance network_info: |[{"id": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "address": "fa:16:3e:2d:77:7a", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c61ddf-0f", "ovs_interfaceid": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1030.784198] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:77:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9c61ddf-0f69-4349-8c95-a0246ea52982', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1030.793181] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 
tempest-ServerRescueTestJSON-24621824-project-member] Creating folder: Project (43f00e26b76347d0bd40df46ac3acbcb). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1030.793852] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c55f0fcf-1d67-40d9-b90d-821d4c1e5237 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.800134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.060s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.802562] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.380s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.804170] env[68674]: DEBUG nova.objects.instance [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1030.819450] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.819724] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Created folder: Project (43f00e26b76347d0bd40df46ac3acbcb) in parent group-v647377. [ 1030.820105] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Creating folder: Instances. Parent ref: group-v647662. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1030.820725] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea29cdb1-1084-45f9-9f32-8b240974087b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.830405] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.838361] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Created folder: Instances in parent group-v647662. [ 1030.838361] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1030.839271] env[68674]: INFO nova.scheduler.client.report [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleted allocations for instance 5c12cb5d-821c-4e63-86a0-dadc9794a8ba [ 1030.840470] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1030.843496] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3a77eb9-f460-4ae2-b25f-f8dc6f194ea2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.637752] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a6d0d234-4270-4bd9-9e15-fc529e8d5409 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.139s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.642249] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "refresh_cache-8740a794-a772-4260-aeb1-51762a586fe2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.642249] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-8740a794-a772-4260-aeb1-51762a586fe2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.642249] env[68674]: DEBUG nova.network.neutron [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 
8740a794-a772-4260-aeb1-51762a586fe2] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.646695] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Received event network-changed-e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.647188] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Refreshing instance network info cache due to event network-changed-e9c61ddf-0f69-4349-8c95-a0246ea52982. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1031.647188] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Acquiring lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.647324] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Acquired lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.647495] env[68674]: DEBUG nova.network.neutron [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Refreshing network info cache for port e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.656334] env[68674]: DEBUG oslo_concurrency.lockutils [None req-cc71577a-ada1-49f3-8823-33525add94c0 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "5c12cb5d-821c-4e63-86a0-dadc9794a8ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.816s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.660918] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.660918] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.667970] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1031.667970] env[68674]: value = "task-3240654" [ 1031.667970] env[68674]: _type = "Task" [ 1031.667970] 
env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.684988] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240654, 'name': CreateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.158513] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3936d6f1-5752-44be-b88d-bb5eb1613a3f tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.356s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.159649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.445s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.159984] env[68674]: DEBUG nova.objects.instance [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lazy-loading 'resources' on Instance uuid cab97ca7-968b-4d40-bb1f-2244469e1b56 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.162240] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1032.180028] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.180458] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.181253] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.181512] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.181722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.183134] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240654, 'name': CreateVM_Task, 'duration_secs': 0.44947} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.183776] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.185103] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.185103] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.185103] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1032.186168] env[68674]: INFO nova.compute.manager [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Terminating instance [ 1032.186743] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc529008-8ce8-4d31-aeee-afb25acfa91c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.189800] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.189800] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.190168] env[68674]: INFO nova.compute.manager [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Attaching volume b2708da3-c713-480b-98a2-3fe862ad5593 to /dev/sdb [ 1032.191681] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock 
"63d6c185-db2c-4ede-a716-9a0dd432ab1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.191874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.192061] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.192261] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.192447] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.193783] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "f029042f-d80b-453e-adc9-1e65d7da7aaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.193995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.194216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "f029042f-d80b-453e-adc9-1e65d7da7aaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.194411] env[68674]: 
DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.194616] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.198725] env[68674]: DEBUG nova.compute.manager [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1032.198921] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.199924] env[68674]: DEBUG nova.network.neutron [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.201955] env[68674]: INFO nova.compute.manager [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Terminating instance [ 1032.206870] env[68674]: INFO nova.compute.manager [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Terminating instance [ 1032.207858] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba86c9f7-a1cd-4538-87c4-b8ea4fa7ef34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.216195] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1032.216195] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dc75b0-f218-361c-984c-425074af02b6" [ 1032.216195] env[68674]: _type = "Task" [ 1032.216195] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.224179] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.224894] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dabd568e-82e7-40ec-8fa0-9ef82f0ee903 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.231937] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dc75b0-f218-361c-984c-425074af02b6, 'name': SearchDatastore_Task, 'duration_secs': 0.013588} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.233221] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.233461] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1032.233882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.233882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.234150] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.234507] env[68674]: DEBUG oslo_vmware.api [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 1032.234507] env[68674]: value = "task-3240655" [ 1032.234507] env[68674]: _type = "Task" [ 
1032.234507] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.234726] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-424d59c3-c254-4e3d-987f-ef208b93dd3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.249663] env[68674]: DEBUG oslo_vmware.api [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240655, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.251776] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.251995] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1032.253188] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfab190-0ac0-4829-a2ca-4d88b555519f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.256142] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33eb02d4-6dfa-4318-be1b-8cb3de02a38d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.266084] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb06b249-4235-4808-9a51-16a5585e4a96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.268634] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1032.268634] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5276696f-ca96-04f7-cc00-c1037811096b" [ 1032.268634] env[68674]: _type = "Task" [ 1032.268634] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.281015] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5276696f-ca96-04f7-cc00-c1037811096b, 'name': SearchDatastore_Task, 'duration_secs': 0.012628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.285053] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a05037be-67c7-4cf4-8d4e-4c886c5141d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.287404] env[68674]: DEBUG nova.virt.block_device [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating existing volume attachment record: 40b182da-e288-4af5-bc5d-f5db6b6a7b02 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1032.298272] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1032.298272] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f66cf1-7ee3-2a09-c4fa-c6e88b36956d" [ 1032.298272] env[68674]: _type = "Task" [ 1032.298272] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.305000] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f66cf1-7ee3-2a09-c4fa-c6e88b36956d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.523266] env[68674]: DEBUG nova.network.neutron [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Updating instance_info_cache with network_info: [{"id": "cc6fc3d6-ffdc-41b7-a019-9c9523f037b0", "address": "fa:16:3e:2b:09:2a", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6fc3d6-ff", "ovs_interfaceid": "cc6fc3d6-ffdc-41b7-a019-9c9523f037b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.551626] env[68674]: DEBUG nova.network.neutron [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Updated VIF entry in instance network info cache for port 
e9c61ddf-0f69-4349-8c95-a0246ea52982. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.551626] env[68674]: DEBUG nova.network.neutron [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Updating instance_info_cache with network_info: [{"id": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "address": "fa:16:3e:2d:77:7a", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c61ddf-0f", "ovs_interfaceid": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.687464] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.718822] env[68674]: DEBUG nova.compute.manager [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1032.719133] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.719726] env[68674]: DEBUG nova.compute.manager [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1032.719978] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.721143] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00c092b-a9da-42d3-9139-16f17750fbcf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.724901] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61305429-bc54-4f32-a679-f44978c13f49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.738385] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.738843] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.739335] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ca87d4c-003f-44ec-9bd9-ea5fea2619a6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.743900] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4bc97fc-4a64-42b0-9d7c-2d52680b89ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.753317] env[68674]: DEBUG oslo_vmware.api [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240655, 'name': PowerOffVM_Task, 'duration_secs': 0.368958} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.755427] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.755617] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.755968] env[68674]: DEBUG oslo_vmware.api [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1032.755968] env[68674]: value = "task-3240660" [ 1032.755968] env[68674]: _type = "Task" [ 1032.755968] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.756281] env[68674]: DEBUG oslo_vmware.api [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 1032.756281] env[68674]: value = "task-3240659" [ 1032.756281] env[68674]: _type = "Task" [ 1032.756281] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.756537] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-194d8d4c-c7ae-43e4-a27e-5c98dabe51a9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.776028] env[68674]: DEBUG oslo_vmware.api [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240660, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.778888] env[68674]: DEBUG oslo_vmware.api [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240659, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.807287] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f66cf1-7ee3-2a09-c4fa-c6e88b36956d, 'name': SearchDatastore_Task, 'duration_secs': 0.012171} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.809978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.810280] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/e9bebb3b-78ff-42b1-a350-efd1db5c6eaa.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1032.810736] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6737754c-0526-47c1-9407-8058a323bc0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.818776] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1032.818776] env[68674]: value = "task-3240662" [ 1032.818776] env[68674]: _type = "Task" [ 1032.818776] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.831398] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240662, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.976650] env[68674]: DEBUG nova.compute.manager [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Received event network-changed-cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.976930] env[68674]: DEBUG nova.compute.manager [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Refreshing instance network info cache due to event network-changed-cc6fc3d6-ffdc-41b7-a019-9c9523f037b0. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1032.977129] env[68674]: DEBUG oslo_concurrency.lockutils [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] Acquiring lock "refresh_cache-8740a794-a772-4260-aeb1-51762a586fe2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.979571] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0e960c-25b1-4a7e-9930-adbb5b0fbefc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.988432] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e3879b-f2e7-4a79-887d-83b70a724e04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.021901] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a71641-04c4-466a-a9a2-6a85f7952350 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.025143] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-8740a794-a772-4260-aeb1-51762a586fe2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.025472] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Instance network_info: |[{"id": "cc6fc3d6-ffdc-41b7-a019-9c9523f037b0", "address": "fa:16:3e:2b:09:2a", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6fc3d6-ff", "ovs_interfaceid": "cc6fc3d6-ffdc-41b7-a019-9c9523f037b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1033.026235] env[68674]: DEBUG oslo_concurrency.lockutils [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] Acquired lock "refresh_cache-8740a794-a772-4260-aeb1-51762a586fe2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.026450] env[68674]: DEBUG nova.network.neutron [req-8302faf8-950c-4f6f-9110-655be4f239c1 
req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Refreshing network info cache for port cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.027755] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:09:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc6fc3d6-ffdc-41b7-a019-9c9523f037b0', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.035384] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.038256] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.039499] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59b24ca0-36e5-4a76-a379-0e387301b9b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.055510] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6af0a8-bf92-408b-920c-3698db6ca46e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.060261] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Releasing lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.060879] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Received event network-vif-deleted-1e919367-5786-4b72-b1e9-31aed453f5fc {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.060879] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-changed-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.060978] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing instance network info cache due to event network-changed-270836ed-f229-45ed-b23b-58f26fa997be. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1033.061185] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.061321] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.061470] env[68674]: DEBUG nova.network.neutron [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.079796] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.079796] env[68674]: value = "task-3240663" [ 1033.079796] env[68674]: _type = "Task" [ 1033.079796] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.081136] env[68674]: DEBUG nova.compute.provider_tree [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.095189] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240663, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.279492] env[68674]: DEBUG oslo_vmware.api [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240660, 'name': PowerOffVM_Task, 'duration_secs': 0.207875} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.282471] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.282697] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.282953] env[68674]: DEBUG oslo_vmware.api [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240659, 'name': PowerOffVM_Task, 'duration_secs': 0.239574} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.283206] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6ae59dc-484f-4fba-8bee-b69696bbcaf5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.285016] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.285214] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.285464] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e7eaac6-326e-44ca-bba5-66fb34e6483d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.329133] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240662, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.584489] env[68674]: DEBUG nova.scheduler.client.report [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.600299] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240663, 'name': CreateVM_Task, 'duration_secs': 0.501326} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.600430] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.601341] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.601341] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.602770] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.602770] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d69f0c9b-2445-41ce-997c-1f928b2bb6c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.608568] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1033.608568] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527cbb44-c3e2-3c9c-1159-91a3e5432bdd" [ 1033.608568] env[68674]: _type = "Task" [ 1033.608568] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.619831] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527cbb44-c3e2-3c9c-1159-91a3e5432bdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.829912] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519284} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.830301] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/e9bebb3b-78ff-42b1-a350-efd1db5c6eaa.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1033.830508] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1033.830768] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c237ea1e-4f55-478a-883d-5670ace20f26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.838988] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1033.838988] env[68674]: value = "task-3240666" [ 1033.838988] env[68674]: _type = "Task" [ 1033.838988] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.847607] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.937738] env[68674]: DEBUG nova.network.neutron [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Updated VIF entry in instance network info cache for port cc6fc3d6-ffdc-41b7-a019-9c9523f037b0. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.938122] env[68674]: DEBUG nova.network.neutron [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Updating instance_info_cache with network_info: [{"id": "cc6fc3d6-ffdc-41b7-a019-9c9523f037b0", "address": "fa:16:3e:2b:09:2a", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6fc3d6-ff", "ovs_interfaceid": "cc6fc3d6-ffdc-41b7-a019-9c9523f037b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.984573] env[68674]: DEBUG nova.network.neutron [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updated VIF entry in instance network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.984977] env[68674]: DEBUG nova.network.neutron [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.094206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.096706] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.278s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.096947] env[68674]: DEBUG nova.objects.instance [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lazy-loading 'resources' on Instance uuid 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.113505] env[68674]: INFO nova.scheduler.client.report [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Deleted allocations for instance cab97ca7-968b-4d40-bb1f-2244469e1b56 [ 1034.121530] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527cbb44-c3e2-3c9c-1159-91a3e5432bdd, 'name': SearchDatastore_Task, 'duration_secs': 0.011601} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.121806] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.122038] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.122279] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.122426] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.122601] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.123087] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb7f5153-c2b6-4a56-abd6-ecaf9fe9a981 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.136155] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.136368] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.138336] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99d6089a-0bb7-46e2-822f-1193e0ede793 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.144246] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1034.144246] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bdb488-9def-cd8c-11e1-8c51ff916877" [ 1034.144246] env[68674]: _type = "Task" [ 1034.144246] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.154121] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bdb488-9def-cd8c-11e1-8c51ff916877, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.292449] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.292771] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.292947] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Deleting the datastore file [datastore1] fa8c58b7-a462-437f-b1ed-57fef6aa3903 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.293266] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be9ba8b4-8224-4db8-948f-af3dcfbb178d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.297360] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.297572] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.297755] env[68674]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleting the datastore file [datastore1] f029042f-d80b-453e-adc9-1e65d7da7aaf {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.298068] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e582dcb3-91aa-4e66-b316-7d82ffe427f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.302562] env[68674]: DEBUG oslo_vmware.api [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for the task: (returnval){ [ 1034.302562] env[68674]: value = "task-3240667" [ 1034.302562] env[68674]: _type = "Task" [ 1034.302562] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.307267] env[68674]: DEBUG oslo_vmware.api [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for the task: (returnval){ [ 1034.307267] env[68674]: value = "task-3240668" [ 1034.307267] env[68674]: _type = "Task" [ 1034.307267] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.313766] env[68674]: DEBUG oslo_vmware.api [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.319733] env[68674]: DEBUG oslo_vmware.api [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240668, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.323308] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.323517] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.323850] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleting the datastore file [datastore1] 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.324175] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f301d9e-628d-4791-8b29-4036d4f7bb0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.331949] env[68674]: DEBUG oslo_vmware.api [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1034.331949] env[68674]: value = "task-3240669" [ 1034.331949] env[68674]: _type = "Task" [ 1034.331949] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.340152] env[68674]: DEBUG oslo_vmware.api [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240669, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.347483] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079369} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.347732] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1034.348464] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87334767-e5f6-4951-bf1b-18ea6f2d82e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.372597] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/e9bebb3b-78ff-42b1-a350-efd1db5c6eaa.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1034.372918] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a7ebb05-33c2-4ede-ba62-bd9dd4f5082b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.395698] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1034.395698] env[68674]: value = "task-3240670" [ 1034.395698] env[68674]: _type = "Task" [ 1034.395698] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.404482] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240670, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.442843] env[68674]: DEBUG oslo_concurrency.lockutils [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] Releasing lock "refresh_cache-8740a794-a772-4260-aeb1-51762a586fe2" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.443154] env[68674]: DEBUG nova.compute.manager [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Received event network-changed-4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.443328] env[68674]: DEBUG nova.compute.manager [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Refreshing instance network info cache due to event network-changed-4d94c698-e74c-4238-8f2e-ead75015687e. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1034.443538] env[68674]: DEBUG oslo_concurrency.lockutils [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] Acquiring lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.443757] env[68674]: DEBUG oslo_concurrency.lockutils [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] Acquired lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.443929] env[68674]: DEBUG nova.network.neutron [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Refreshing network info cache for port 4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.488468] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.488468] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Received event network-vif-plugged-cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.488468] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Acquiring lock "8740a794-a772-4260-aeb1-51762a586fe2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.488468] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Lock "8740a794-a772-4260-aeb1-51762a586fe2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.488468] env[68674]: DEBUG oslo_concurrency.lockutils [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] Lock "8740a794-a772-4260-aeb1-51762a586fe2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.488782] env[68674]: DEBUG nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] No waiting events found dispatching network-vif-plugged-cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.488782] env[68674]: WARNING nova.compute.manager [req-f711adb1-6fed-4e8d-acaa-fc2258158d47 req-7f519ba1-4285-491c-8dec-18826097ef19 service nova] [instance: 
8740a794-a772-4260-aeb1-51762a586fe2] Received unexpected event network-vif-plugged-cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 for instance with vm_state building and task_state spawning. [ 1034.515682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.516079] env[68674]: DEBUG oslo_concurrency.lockutils [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.620920] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25cd34e7-4343-4d01-9d47-0b06023bb21e tempest-ServerShowV254Test-1102079489 tempest-ServerShowV254Test-1102079489-project-member] Lock "cab97ca7-968b-4d40-bb1f-2244469e1b56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.185s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.657244] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bdb488-9def-cd8c-11e1-8c51ff916877, 'name': SearchDatastore_Task, 'duration_secs': 0.049308} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.658048] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc432e86-9347-4abb-a92b-673168a44a5b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.665972] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1034.665972] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c1ce10-3143-565a-2696-769eadc44b9a" [ 1034.665972] env[68674]: _type = "Task" [ 1034.665972] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.674453] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c1ce10-3143-565a-2696-769eadc44b9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.814360] env[68674]: DEBUG oslo_vmware.api [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Task: {'id': task-3240667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260244} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.817297] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.817495] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.817693] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.817874] env[68674]: INFO nova.compute.manager [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Took 2.62 seconds to destroy the instance on the hypervisor. [ 1034.818133] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.818515] env[68674]: DEBUG nova.compute.manager [-] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.818614] env[68674]: DEBUG nova.network.neutron [-] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.822919] env[68674]: DEBUG oslo_vmware.api [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Task: {'id': task-3240668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279744} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.823799] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.823799] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.823909] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.824039] env[68674]: INFO nova.compute.manager [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Took 2.10 seconds to destroy the instance on the hypervisor. [ 1034.824245] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.824436] env[68674]: DEBUG nova.compute.manager [-] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.824528] env[68674]: DEBUG nova.network.neutron [-] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.841310] env[68674]: DEBUG oslo_vmware.api [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242154} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.843944] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.844162] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.844374] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.844555] env[68674]: INFO nova.compute.manager [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Took 2.12 seconds to destroy the instance on the hypervisor. [ 1034.844803] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.845198] env[68674]: DEBUG nova.compute.manager [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.845293] env[68674]: DEBUG nova.network.neutron [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.860622] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea50a236-59f9-4580-929a-69b952979367 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.868813] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0d5635-8b7d-47e9-b86b-1080da79daa2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.908030] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4d47f0-7d62-4775-ba72-5fe31d444e96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.924823] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0c3203-09ea-4935-a685-982f6d1bf4d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.928943] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240670, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.940796] env[68674]: DEBUG nova.compute.provider_tree [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1035.019514] env[68674]: INFO nova.compute.manager [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Detaching volume 2110fa31-61e9-4ce5-a495-f1f566fee58d [ 1035.068583] env[68674]: INFO nova.virt.block_device [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Attempting to driver detach volume 2110fa31-61e9-4ce5-a495-f1f566fee58d from mountpoint /dev/sdb [ 1035.068892] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Volume detach. Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1035.068990] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1035.069894] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8564fb28-24a9-49e4-82fb-92a1e715c851 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.097685] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c753318f-7c36-47ef-a9fb-d37de48a3427 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.105252] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06220264-cf77-4e03-a626-30378ebf6638 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.130594] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757001b8-1973-4182-b5b8-b4381a174290 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.146736] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] The volume has not been displaced from its original location: [datastore1] volume-2110fa31-61e9-4ce5-a495-f1f566fee58d/volume-2110fa31-61e9-4ce5-a495-f1f566fee58d.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1035.152045] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1035.152351] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36b33b31-f157-4e4b-bbc4-5b796a1b2676 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.171502] env[68674]: DEBUG oslo_vmware.api [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1035.171502] env[68674]: value = "task-3240672" [ 1035.171502] env[68674]: _type = "Task" [ 1035.171502] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.175122] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c1ce10-3143-565a-2696-769eadc44b9a, 'name': SearchDatastore_Task, 'duration_secs': 0.011271} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.178121] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.178254] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 8740a794-a772-4260-aeb1-51762a586fe2/8740a794-a772-4260-aeb1-51762a586fe2.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.178497] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-097ef0ff-ddf7-4b71-82d6-e8c5a74dd21f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.186946] env[68674]: DEBUG oslo_vmware.api [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240672, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.190492] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1035.190492] env[68674]: value = "task-3240673" [ 1035.190492] env[68674]: _type = "Task" [ 1035.190492] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.200386] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.333656] env[68674]: DEBUG nova.network.neutron [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updated VIF entry in instance network info cache for port 4d94c698-e74c-4238-8f2e-ead75015687e. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.334120] env[68674]: DEBUG nova.network.neutron [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating instance_info_cache with network_info: [{"id": "4d94c698-e74c-4238-8f2e-ead75015687e", "address": "fa:16:3e:2c:a1:73", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94c698-e7", "ovs_interfaceid": "4d94c698-e74c-4238-8f2e-ead75015687e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.414077] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240670, 'name': ReconfigVM_Task, 'duration_secs': 0.573904} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.414077] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Reconfigured VM instance instance-00000064 to attach disk [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/e9bebb3b-78ff-42b1-a350-efd1db5c6eaa.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1035.415061] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58baf214-635b-4f63-abed-3bcb6a7f2f21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.423908] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1035.423908] env[68674]: value = "task-3240674" [ 1035.423908] env[68674]: _type = "Task" [ 1035.423908] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.434585] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240674, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.466063] env[68674]: ERROR nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] [req-3d54e441-5012-4adc-a720-d774302b9f97] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3d54e441-5012-4adc-a720-d774302b9f97"}]} [ 1035.496736] env[68674]: DEBUG nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1035.512413] env[68674]: DEBUG nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1035.512680] env[68674]: DEBUG nova.compute.provider_tree [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1035.528151] env[68674]: DEBUG nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 
tempest-ServerDiskConfigTestJSON-1890870491-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1035.554566] env[68674]: DEBUG nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1035.687452] env[68674]: DEBUG oslo_vmware.api [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240672, 'name': ReconfigVM_Task, 'duration_secs': 0.31136} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.687752] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1035.692724] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c35c8090-7a96-471c-9e1c-866dd461a122 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.721022] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240673, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517397} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.721022] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 8740a794-a772-4260-aeb1-51762a586fe2/8740a794-a772-4260-aeb1-51762a586fe2.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.721022] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.721022] env[68674]: DEBUG oslo_vmware.api [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1035.721022] env[68674]: value = "task-3240675" [ 1035.721022] env[68674]: _type = "Task" [ 1035.721022] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.721022] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e6d4aee-6ffc-4819-b31b-3afdab166f80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.732941] env[68674]: DEBUG oslo_vmware.api [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240675, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.737475] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1035.737475] env[68674]: value = "task-3240676" [ 1035.737475] env[68674]: _type = "Task" [ 1035.737475] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.815146] env[68674]: DEBUG nova.compute.manager [req-41921278-4038-4239-85d1-56b8b632f180 req-ad3daa73-ef14-4e97-a7be-c2455bea71d7 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Received event network-vif-deleted-eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.815397] env[68674]: INFO nova.compute.manager [req-41921278-4038-4239-85d1-56b8b632f180 req-ad3daa73-ef14-4e97-a7be-c2455bea71d7 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Neutron deleted interface eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7; detaching it from the instance and deleting it from the info cache [ 1035.815704] env[68674]: DEBUG nova.network.neutron [req-41921278-4038-4239-85d1-56b8b632f180 req-ad3daa73-ef14-4e97-a7be-c2455bea71d7 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.836920] env[68674]: DEBUG oslo_concurrency.lockutils [req-8302faf8-950c-4f6f-9110-655be4f239c1 req-ec981e74-6e48-4dab-bf98-55123af08366 service nova] Releasing lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.939672] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240674, 'name': Rename_Task, 'duration_secs': 0.222645} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.940189] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.940189] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b570d693-8520-46bc-87e7-185f8a43affb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.945487] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b297a2c-fe39-4c98-a4e0-8bb75d87cf65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.949706] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1035.949706] env[68674]: value = "task-3240677" [ 1035.949706] env[68674]: _type = "Task" [ 1035.949706] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.956516] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffee6b5-92ed-4079-935f-da14ca3c8eb9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.963014] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240677, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.992054] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ac0daf-6852-4109-90c4-fd75d4b24baf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.000062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bc15ad-29b6-4e31-abc2-d8576ab247d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.014177] env[68674]: DEBUG nova.compute.provider_tree [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1036.246453] env[68674]: DEBUG oslo_vmware.api [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240675, 'name': ReconfigVM_Task, 'duration_secs': 0.181038} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.247931] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647627', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'name': 'volume-2110fa31-61e9-4ce5-a495-f1f566fee58d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2d02adff-9fbf-4889-99e4-4efde5a51b33', 'attached_at': '', 'detached_at': '', 'volume_id': '2110fa31-61e9-4ce5-a495-f1f566fee58d', 'serial': '2110fa31-61e9-4ce5-a495-f1f566fee58d'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1036.253806] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09004} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.256445] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.256445] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a3f4c6-879d-4a29-89aa-03c33253a5e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.281393] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 8740a794-a772-4260-aeb1-51762a586fe2/8740a794-a772-4260-aeb1-51762a586fe2.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.281779] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27a86be8-c181-4108-adeb-48406a4164ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.296744] env[68674]: DEBUG nova.network.neutron [-] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.304828] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1036.304828] env[68674]: value = "task-3240678" [ 1036.304828] env[68674]: _type = "Task" [ 1036.304828] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.314392] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240678, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.318100] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-316f2596-149c-43a1-801f-6f00e5d6ef3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.329271] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a467bf7d-5a11-4cd8-8576-7ef333616ffd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.366278] env[68674]: DEBUG nova.compute.manager [req-41921278-4038-4239-85d1-56b8b632f180 req-ad3daa73-ef14-4e97-a7be-c2455bea71d7 service nova] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Detach interface failed, port_id=eb762aca-5fbd-45f4-a81e-77d2c5d7aaf7, reason: Instance f029042f-d80b-453e-adc9-1e65d7da7aaf could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1036.464804] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240677, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.569354] env[68674]: DEBUG nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 143 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1036.569654] env[68674]: DEBUG nova.compute.provider_tree [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 143 to 144 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1036.569839] env[68674]: DEBUG nova.compute.provider_tree [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1036.733169] env[68674]: DEBUG nova.network.neutron [-] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.743352] env[68674]: DEBUG nova.network.neutron [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.799016] env[68674]: INFO nova.compute.manager [-] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Took 1.97 seconds to deallocate network for instance. 
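The recurring "Waiting for the task: (returnval){...}", "Task: {...} progress is N%" and "completed successfully" records above (task-3240674 through task-3240678) all come from oslo.vmware's task-polling helper. A minimal sketch of that invoke/wait pattern follows; the endpoint, credentials and vm_ref are placeholders, not values taken from this log.

```python
# Sketch only: oslo.vmware's invoke/wait pattern that produces the
# wait_for_task / _poll_task DEBUG records seen in this log.
from oslo_vmware import api

# Placeholder endpoint and credentials (not from this log).
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_on(vm_ref):
    # invoke_api() issues the SOAP call and returns a task reference;
    # wait_for_task() polls it until it reaches a terminal state, logging
    # the intermediate "progress is N%" lines along the way.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)
```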
[ 1036.800586] env[68674]: DEBUG nova.objects.instance [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'flavor' on Instance uuid 2d02adff-9fbf-4889-99e4-4efde5a51b33 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.818377] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240678, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.855941] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1036.856253] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647666', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'name': 'volume-b2708da3-c713-480b-98a2-3fe862ad5593', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a7a6269-65a8-402c-b174-a4a46d20a33a', 'attached_at': '', 'detached_at': '', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'serial': 'b2708da3-c713-480b-98a2-3fe862ad5593'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1036.857118] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242e8872-cef6-476e-b3a0-9292948d9532 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.878376] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d3c9a4-41af-4fa5-9704-dae78c1018be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.905479] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-b2708da3-c713-480b-98a2-3fe862ad5593/volume-b2708da3-c713-480b-98a2-3fe862ad5593.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.905777] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcc9bbeb-d455-4b86-bb6c-2af58776bd46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.928648] env[68674]: DEBUG oslo_vmware.api [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 
1036.928648] env[68674]: value = "task-3240679" [ 1036.928648] env[68674]: _type = "Task" [ 1036.928648] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.937652] env[68674]: DEBUG oslo_vmware.api [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240679, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.962143] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240677, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.076430] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.980s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.078866] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.249s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.080420] env[68674]: INFO nova.compute.claims [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.098279] env[68674]: INFO nova.scheduler.client.report [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Deleted allocations for instance 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031 [ 1037.236114] env[68674]: INFO nova.compute.manager [-] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Took 2.42 seconds to deallocate network for instance. [ 1037.245639] env[68674]: INFO nova.compute.manager [-] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Took 2.40 seconds to deallocate network for instance. [ 1037.312982] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.319661] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240678, 'name': ReconfigVM_Task, 'duration_secs': 0.740695} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.319948] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 8740a794-a772-4260-aeb1-51762a586fe2/8740a794-a772-4260-aeb1-51762a586fe2.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.320994] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36432c05-dcc5-49e7-89fa-fa87aed0058a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.327521] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1037.327521] env[68674]: value = "task-3240680" [ 1037.327521] env[68674]: _type = "Task" [ 1037.327521] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.336965] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240680, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.438871] env[68674]: DEBUG oslo_vmware.api [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240679, 'name': ReconfigVM_Task, 'duration_secs': 0.378114} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.439160] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-b2708da3-c713-480b-98a2-3fe862ad5593/volume-b2708da3-c713-480b-98a2-3fe862ad5593.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.444449] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-214dc22c-0ff9-45e5-949a-52f16ce8f08e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.464376] env[68674]: DEBUG oslo_vmware.api [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240677, 'name': PowerOnVM_Task, 'duration_secs': 1.045849} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.465713] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1037.465947] env[68674]: INFO nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Took 10.13 seconds to spawn the instance on the hypervisor. [ 1037.466152] env[68674]: DEBUG nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.466479] env[68674]: DEBUG oslo_vmware.api [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1037.466479] env[68674]: value = "task-3240681" [ 1037.466479] env[68674]: _type = "Task" [ 1037.466479] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.467170] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8641863-c584-466c-8f51-8d420f24ae3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.482168] env[68674]: DEBUG oslo_vmware.api [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240681, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.606193] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a303e1a3-6cdc-47f0-ad56-098d1d84ca68 tempest-ServerDiskConfigTestJSON-1890870491 tempest-ServerDiskConfigTestJSON-1890870491-project-member] Lock "79ee95b6-7321-4e33-a0e4-2c8ed1bc1031" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.744254] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.752353] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.812826] env[68674]: DEBUG oslo_concurrency.lockutils [None req-10f041d7-f859-414b-a667-9de4c6021745 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.296s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.841165] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240680, 'name': Rename_Task, 'duration_secs': 0.281531} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.841165] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.841165] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b809c1a-840b-42a0-a379-eddb93df3b2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.845828] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1037.845828] env[68674]: value = "task-3240682" [ 1037.845828] env[68674]: _type = "Task" [ 1037.845828] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.851705] env[68674]: DEBUG nova.compute.manager [req-bcfd79e0-f166-4342-931f-8e8fbfdbfed2 req-2040e9fe-0a56-4039-857e-74b056ccca35 service nova] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Received event network-vif-deleted-e6a3416c-8601-4d3f-8b5b-74d43a100d6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1037.851705] env[68674]: DEBUG nova.compute.manager [req-bcfd79e0-f166-4342-931f-8e8fbfdbfed2 req-2040e9fe-0a56-4039-857e-74b056ccca35 service nova] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Received event network-vif-deleted-2cf52206-a0c1-4b57-886d-23df69181f20 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1037.856169] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240682, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.979172] env[68674]: DEBUG oslo_vmware.api [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240681, 'name': ReconfigVM_Task, 'duration_secs': 0.159655} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.979491] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647666', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'name': 'volume-b2708da3-c713-480b-98a2-3fe862ad5593', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a7a6269-65a8-402c-b174-a4a46d20a33a', 'attached_at': '', 'detached_at': '', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'serial': 'b2708da3-c713-480b-98a2-3fe862ad5593'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1037.988827] env[68674]: INFO nova.compute.manager [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Took 23.63 seconds to build instance. 
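The 'Lock "compute_resources" acquired by ... :: waited N s' and '"released" by ... :: held N s' records interleaved above are emitted by oslo.concurrency's lockutils wrapper itself, including the waited/held timings. A minimal sketch of the two usual usage forms, with a placeholder critical section:

```python
# Sketch only: the oslo.concurrency locking pattern behind the
# compute_resources acquire/release records in this log.
from oslo_concurrency import lockutils

# Context-manager form.
with lockutils.lock('compute_resources'):
    pass  # critical section, e.g. a resource-tracker claim or usage update

# Equivalent decorator form.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass
```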
[ 1038.327765] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f1cb5d-dad3-4576-b049-5ea58c1b5649 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.335801] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887c772b-2350-44af-b3d1-bd8c67f989ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.370075] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b80dbeb-233c-4493-b4e7-9a9af3c2482e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.379928] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240682, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.383293] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff610369-3c4a-49cc-9f0e-ad56c960c471 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.396316] env[68674]: DEBUG nova.compute.provider_tree [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.491189] env[68674]: DEBUG oslo_concurrency.lockutils [None req-71f1575e-8bb1-43f6-9dbb-c39cb81ab313 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.145s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.799264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.799591] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.799826] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "2d02adff-9fbf-4889-99e4-4efde5a51b33-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.800418] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.800649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.803316] env[68674]: INFO nova.compute.manager [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Terminating instance [ 1038.878135] env[68674]: DEBUG oslo_vmware.api [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240682, 'name': PowerOnVM_Task, 'duration_secs': 0.929191} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.878420] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.878629] env[68674]: INFO nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1038.878808] env[68674]: DEBUG nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.879780] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5f336d-bd15-4309-9304-12acf69ac0b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.899212] env[68674]: DEBUG nova.scheduler.client.report [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.020775] env[68674]: DEBUG nova.objects.instance [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'flavor' on Instance uuid 2a7a6269-65a8-402c-b174-a4a46d20a33a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.054471] env[68674]: INFO nova.compute.manager [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Rescuing [ 1039.055029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.055220] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.055812] env[68674]: DEBUG nova.network.neutron [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.308436] env[68674]: DEBUG nova.compute.manager [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.309081] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.313384] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0407fa7-5d74-4780-9b16-a6db29a1886e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.325881] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.326155] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82c6975e-f3d3-43e2-afbc-8c9b64059c92 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.331876] env[68674]: DEBUG oslo_vmware.api [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1039.331876] env[68674]: value = "task-3240683" [ 1039.331876] env[68674]: _type = "Task" [ 1039.331876] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.340447] env[68674]: DEBUG oslo_vmware.api [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240683, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.402233] env[68674]: INFO nova.compute.manager [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Took 24.01 seconds to build instance. [ 1039.405140] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.405140] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1039.410467] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.721s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.413372] env[68674]: INFO nova.compute.claims [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.526379] env[68674]: DEBUG oslo_concurrency.lockutils [None req-90ad8ae5-25d4-49e0-a146-7907f0ce615d tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.336s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.849398] env[68674]: DEBUG oslo_vmware.api [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240683, 'name': PowerOffVM_Task, 'duration_secs': 0.202817} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.849398] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.849398] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1039.849596] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ec8d307-9336-46ba-9fb4-2acb24f99599 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.862541] env[68674]: DEBUG nova.network.neutron [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Updating instance_info_cache with network_info: [{"id": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "address": "fa:16:3e:2d:77:7a", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": 
"43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9c61ddf-0f", "ovs_interfaceid": "e9c61ddf-0f69-4349-8c95-a0246ea52982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.903703] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1a34db30-d3e1-471a-8217-588fc176a370 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8740a794-a772-4260-aeb1-51762a586fe2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.526s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.918745] env[68674]: DEBUG nova.compute.utils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.923442] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1039.924173] env[68674]: DEBUG nova.network.neutron [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.952072] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "8740a794-a772-4260-aeb1-51762a586fe2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.952360] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8740a794-a772-4260-aeb1-51762a586fe2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.952715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "8740a794-a772-4260-aeb1-51762a586fe2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.953592] env[68674]: DEBUG 
oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8740a794-a772-4260-aeb1-51762a586fe2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.953592] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8740a794-a772-4260-aeb1-51762a586fe2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.956581] env[68674]: INFO nova.compute.manager [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Terminating instance [ 1039.977900] env[68674]: DEBUG nova.policy [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28e9b76e01f463bbb375cbd9c51684f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81afe76c94de4e94b53f15af0ef95e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1040.216584] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.216584] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.216584] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore file [datastore2] 2d02adff-9fbf-4889-99e4-4efde5a51b33 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.216584] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b013ae7d-94c6-43b7-b68e-57b0ef076f48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.222499] env[68674]: DEBUG oslo_vmware.api [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1040.222499] env[68674]: value = "task-3240685" [ 
1040.222499] env[68674]: _type = "Task" [ 1040.222499] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.231643] env[68674]: DEBUG oslo_vmware.api [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240685, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.276914] env[68674]: DEBUG nova.network.neutron [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Successfully created port: 097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.366330] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "refresh_cache-e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.424301] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.462134] env[68674]: DEBUG nova.compute.manager [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1040.466081] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.466081] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889cf983-12c0-4c3a-9227-1d27531a5110 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.470994] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.471256] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed58c0da-d9c4-4e9a-9234-f39c368ed5bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.478567] env[68674]: DEBUG oslo_vmware.api [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1040.478567] env[68674]: value = "task-3240686" [ 1040.478567] env[68674]: _type = "Task" [ 1040.478567] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.493088] env[68674]: DEBUG oslo_vmware.api [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.715053] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a96be18-8aaa-4175-ac41-a9e78344782f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.729272] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8729c5d0-df87-49bc-87df-35fc3e0a3f83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.738583] env[68674]: DEBUG oslo_vmware.api [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267335} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.763782] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.763900] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.764099] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.764278] env[68674]: INFO nova.compute.manager [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Took 1.46 seconds to destroy the instance on the hypervisor. [ 1040.764528] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.765323] env[68674]: DEBUG nova.compute.manager [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.765430] env[68674]: DEBUG nova.network.neutron [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.767582] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfad354e-d47a-4450-aecf-8fee1f2ad529 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.775409] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6410b8-ad58-492c-a875-79333b50bfe0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.788270] env[68674]: DEBUG nova.compute.provider_tree [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.877928] env[68674]: DEBUG nova.compute.manager [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1040.990977] env[68674]: DEBUG oslo_vmware.api [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240686, 'name': PowerOffVM_Task, 'duration_secs': 0.262422} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.991456] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.991676] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.991981] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d638d90b-d9c3-46a3-be27-31d48db204b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.075156] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1041.075416] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1041.075672] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore1] 8740a794-a772-4260-aeb1-51762a586fe2 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.075991] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d562aed7-bd38-4889-86dc-6489a0ff95d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.084221] env[68674]: DEBUG oslo_vmware.api [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1041.084221] env[68674]: value = "task-3240688" [ 1041.084221] env[68674]: _type = "Task" [ 1041.084221] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.094365] env[68674]: DEBUG oslo_vmware.api [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240688, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.260028] env[68674]: DEBUG nova.compute.manager [req-cec8b32f-b6e3-4727-aa97-4d036230a2d7 req-88f94e81-af4a-4a8e-bb91-f7866ec53847 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Received event network-vif-deleted-e6ae43a2-a24c-4187-aba3-c546140142b9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.260028] env[68674]: INFO nova.compute.manager [req-cec8b32f-b6e3-4727-aa97-4d036230a2d7 req-88f94e81-af4a-4a8e-bb91-f7866ec53847 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Neutron deleted interface e6ae43a2-a24c-4187-aba3-c546140142b9; detaching it from the instance and deleting it from the info cache [ 1041.260590] env[68674]: DEBUG nova.network.neutron [req-cec8b32f-b6e3-4727-aa97-4d036230a2d7 req-88f94e81-af4a-4a8e-bb91-f7866ec53847 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.292618] env[68674]: DEBUG nova.scheduler.client.report [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.397883] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.435799] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.463016] env[68674]: DEBUG nova.virt.hardware [None 
req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.463520] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.463842] env[68674]: DEBUG nova.virt.hardware [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.465076] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73afaa37-c564-4c34-bf6e-928635f91575 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.473774] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b69813-c519-464d-bf47-39763042be08 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.596983] env[68674]: DEBUG oslo_vmware.api [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168632} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.597305] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.597903] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.598074] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.598438] env[68674]: INFO nova.compute.manager [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1041.600766] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.600766] env[68674]: DEBUG nova.compute.manager [-] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.600766] env[68674]: DEBUG nova.network.neutron [-] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.740413] env[68674]: DEBUG nova.network.neutron [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.763077] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f60ccef2-2b75-4812-86d1-6a204755d541 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.774682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a66711-bf6f-46ee-89d5-074149c251c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.814615] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.814615] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1041.819241] env[68674]: DEBUG nova.compute.manager [req-cec8b32f-b6e3-4727-aa97-4d036230a2d7 req-88f94e81-af4a-4a8e-bb91-f7866ec53847 service nova] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Detach interface failed, port_id=e6ae43a2-a24c-4187-aba3-c546140142b9, reason: Instance 2d02adff-9fbf-4889-99e4-4efde5a51b33 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1041.819601] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.507s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.819959] env[68674]: DEBUG nova.objects.instance [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lazy-loading 'resources' on Instance uuid f029042f-d80b-453e-adc9-1e65d7da7aaf {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.920890] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1041.921300] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7bb892a-9489-4e91-bac5-a3a19f4154d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.930268] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1041.930268] env[68674]: value = "task-3240689" [ 1041.930268] env[68674]: _type = "Task" [ 1041.930268] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.939890] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240689, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.173787] env[68674]: DEBUG nova.network.neutron [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Successfully updated port: 097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.244908] env[68674]: INFO nova.compute.manager [-] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Took 1.48 seconds to deallocate network for instance. [ 1042.323019] env[68674]: DEBUG nova.compute.utils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1042.323019] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1042.323019] env[68674]: DEBUG nova.network.neutron [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1042.396337] env[68674]: DEBUG nova.policy [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1042.445112] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240689, 'name': PowerOffVM_Task, 'duration_secs': 0.199339} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.445743] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.447973] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3fccdd-2cdb-4750-a39f-7885666a79f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.477278] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ba66be-01bf-4f3a-b4f2-24125d99c267 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.513598] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.513598] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3f67222-bebb-4cd8-a56e-9beb8aad043c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.521718] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1042.521718] env[68674]: value = "task-3240690" [ 1042.521718] env[68674]: _type = "Task" [ 1042.521718] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.534678] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1042.534678] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.534678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.534678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.534842] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.535546] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbea7459-0897-49b0-a22b-8f5e63b2d993 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.549630] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.549852] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.550677] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90fc69dd-7954-4695-9603-00fed50a6cc7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.559119] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1042.559119] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521e7efc-e95b-5275-183b-4c92855ccc94" [ 1042.559119] env[68674]: _type = "Task" [ 1042.559119] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.570385] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521e7efc-e95b-5275-183b-4c92855ccc94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.636351] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf5782b-39b2-4969-85be-a9932c386e23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.643797] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2cad3c-997c-450b-ba12-06498304c000 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.675806] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a77646-8e5f-44e0-b56a-79eda179044c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.678839] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.679678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.679678] env[68674]: DEBUG nova.network.neutron [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.685515] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2e00af-5fe1-4f93-a3a5-e4fa016c0216 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.690387] 
env[68674]: DEBUG nova.network.neutron [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Successfully created port: b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.704508] env[68674]: DEBUG nova.compute.provider_tree [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.722584] env[68674]: DEBUG nova.network.neutron [-] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.753961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.831862] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1043.070443] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521e7efc-e95b-5275-183b-4c92855ccc94, 'name': SearchDatastore_Task, 'duration_secs': 0.009657} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.073017] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb56869-edaf-4c98-8881-f732415bdb89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.077038] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1043.077038] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527da6f5-e4c6-6a99-bc3b-43f36ed83e82" [ 1043.077038] env[68674]: _type = "Task" [ 1043.077038] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.085241] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527da6f5-e4c6-6a99-bc3b-43f36ed83e82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.210102] env[68674]: DEBUG nova.scheduler.client.report [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.220312] env[68674]: DEBUG nova.network.neutron [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.229326] env[68674]: INFO nova.compute.manager [-] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Took 1.63 seconds to deallocate network for instance. [ 1043.370260] env[68674]: DEBUG nova.compute.manager [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Received event network-vif-plugged-097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.370956] env[68674]: DEBUG oslo_concurrency.lockutils [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] Acquiring lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.373385] env[68674]: DEBUG oslo_concurrency.lockutils [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.373385] env[68674]: DEBUG oslo_concurrency.lockutils [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.373385] env[68674]: DEBUG nova.compute.manager [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] No waiting events found dispatching network-vif-plugged-097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.373385] env[68674]: WARNING nova.compute.manager [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] 
Received unexpected event network-vif-plugged-097ab3f0-c9c4-48be-9bcb-78b1068e5192 for instance with vm_state building and task_state spawning. [ 1043.373385] env[68674]: DEBUG nova.compute.manager [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Received event network-changed-097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.373385] env[68674]: DEBUG nova.compute.manager [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Refreshing instance network info cache due to event network-changed-097ab3f0-c9c4-48be-9bcb-78b1068e5192. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1043.373385] env[68674]: DEBUG oslo_concurrency.lockutils [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] Acquiring lock "refresh_cache-6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.409062] env[68674]: DEBUG nova.network.neutron [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Updating instance_info_cache with network_info: [{"id": "097ab3f0-c9c4-48be-9bcb-78b1068e5192", "address": "fa:16:3e:03:0a:c4", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097ab3f0-c9", "ovs_interfaceid": "097ab3f0-c9c4-48be-9bcb-78b1068e5192", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.588820] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527da6f5-e4c6-6a99-bc3b-43f36ed83e82, 'name': SearchDatastore_Task, 'duration_secs': 0.010901} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.589857] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.589857] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. {{(pid=68674) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1043.589857] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-236f6ce4-a0d1-4526-b41b-2318151d689b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.600018] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1043.600018] env[68674]: value = "task-3240691" [ 1043.600018] env[68674]: _type = "Task" [ 1043.600018] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.606420] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240691, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.714688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.717191] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.973s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.717472] env[68674]: DEBUG nova.objects.instance [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lazy-loading 'resources' on Instance uuid fa8c58b7-a462-437f-b1ed-57fef6aa3903 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.734024] env[68674]: INFO nova.scheduler.client.report [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Deleted allocations for instance f029042f-d80b-453e-adc9-1e65d7da7aaf [ 1043.738870] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.846251] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1043.876211] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1043.876677] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.877042] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1043.877373] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.877643] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1043.877935] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1043.878325] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1043.878632] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1043.878936] env[68674]: DEBUG 
nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1043.879243] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1043.879623] env[68674]: DEBUG nova.virt.hardware [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1043.880906] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbbd2a7-dce1-43b4-a262-7f958729d8db {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.896689] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6e1505-13ac-4155-ba0c-22aec96375c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.913371] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.914143] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Instance network_info: |[{"id": "097ab3f0-c9c4-48be-9bcb-78b1068e5192", "address": "fa:16:3e:03:0a:c4", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097ab3f0-c9", "ovs_interfaceid": "097ab3f0-c9c4-48be-9bcb-78b1068e5192", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1043.915579] env[68674]: DEBUG oslo_concurrency.lockutils 
[req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] Acquired lock "refresh_cache-6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.915800] env[68674]: DEBUG nova.network.neutron [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Refreshing network info cache for port 097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.917513] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:0a:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '097ab3f0-c9c4-48be-9bcb-78b1068e5192', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.926026] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating folder: Project (81afe76c94de4e94b53f15af0ef95e66). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.929683] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4cdcf62-f10c-47d2-9193-496fe2b95e4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.932052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "cbccde73-b903-47f7-9cbc-f0b376a03435" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.932348] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.944419] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created folder: Project (81afe76c94de4e94b53f15af0ef95e66) in parent group-v647377. [ 1043.944555] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating folder: Instances. Parent ref: group-v647668. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.945416] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47d3ce5d-493a-4e0a-8767-b98d8d4fabdb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.956559] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created folder: Instances in parent group-v647668. [ 1043.956836] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1043.957098] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.957329] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60a93b2e-ffd2-4a2e-8bef-ea9f2776339f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.982197] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.982197] env[68674]: value = "task-3240694" [ 1043.982197] env[68674]: _type = "Task" [ 1043.982197] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.993269] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240694, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.107984] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240691, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47131} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.108287] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
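The records above and below all follow the same oslo.vmware task pattern: the driver invokes a vCenter task (PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, CreateVM_Task, ReconfigVM_Task, ...), logs "Waiting for the task: ... task-NNNNNNN ... to complete.", then polls the task, emitting "progress is N%" records until it reports "completed successfully" together with a duration_secs figure. The sketch below is a minimal, self-contained illustration of that poll loop only; it does not call the real oslo.vmware API, and TaskInfo, get_task_info and poll_interval are placeholder names invented for this example.

    import time

    class TaskInfo:
        """Hypothetical stand-in for the state of a vCenter task such as
        task-3240694 (CreateVM_Task); not an oslo.vmware class."""
        def __init__(self, state, progress=0, error=None):
            self.state = state        # 'running', 'success' or 'error'
            self.progress = progress  # 0-100, as in "progress is 99%"
            self.error = error

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the log sequence
        'Waiting for the task ... progress is N% ... completed successfully'."""
        started = time.monotonic()
        while True:
            info = get_task_info()        # one poll corresponds to one 'progress is N%' record
            if info.state == 'success':
                return time.monotonic() - started   # the value reported as duration_secs
            if info.state == 'error':
                raise RuntimeError(info.error)      # the task failed on the vCenter side
            time.sleep(poll_interval)

Under these assumptions, a call such as wait_for_task(lambda: vcenter.get_task_info("task-3240694")) would block until the CreateVM_Task above completes, which is what the repeated progress records for that task (0% here, 99% below) reflect; vcenter.get_task_info is likewise a placeholder, not a real client call.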
[ 1044.109062] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa105aab-a043-44b4-9842-85ab0d602069 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.135967] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.136333] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27e859cc-d896-46bc-9a11-20817a79a9ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.159425] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1044.159425] env[68674]: value = "task-3240695" [ 1044.159425] env[68674]: _type = "Task" [ 1044.159425] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.165358] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240695, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.244285] env[68674]: DEBUG oslo_concurrency.lockutils [None req-97c03ab8-c484-4020-8a71-61ea873d3a38 tempest-ServerRescueNegativeTestJSON-251508045 tempest-ServerRescueNegativeTestJSON-251508045-project-member] Lock "f029042f-d80b-453e-adc9-1e65d7da7aaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.049s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.272938] env[68674]: DEBUG nova.network.neutron [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Successfully updated port: b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.434883] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1044.495831] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240694, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.511177] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682b39a0-f3ae-4d25-b4ca-79e2ad8c08e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.522139] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b862a25d-e62f-4540-8366-89336682b958 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.566323] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a55a4bc-e860-4dbf-b0cf-00d1d2906317 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.580312] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a565f3-627b-4530-9117-5e3b2da83059 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.599348] env[68674]: DEBUG nova.compute.provider_tree [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.664512] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240695, 'name': ReconfigVM_Task, 'duration_secs': 0.368143} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.664792] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Reconfigured VM instance instance-00000064 to attach disk [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.665648] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c646c3c-1815-4c9e-9a3c-99be30c3ccc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.691362] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93a81b27-03cd-4bd5-b8ca-a6fe58f5c3b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.708789] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1044.708789] env[68674]: value = "task-3240696" [ 1044.708789] env[68674]: _type = "Task" [ 1044.708789] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.716601] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.752026] env[68674]: DEBUG nova.network.neutron [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Updated VIF entry in instance network info cache for port 097ab3f0-c9c4-48be-9bcb-78b1068e5192. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.752131] env[68674]: DEBUG nova.network.neutron [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Updating instance_info_cache with network_info: [{"id": "097ab3f0-c9c4-48be-9bcb-78b1068e5192", "address": "fa:16:3e:03:0a:c4", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097ab3f0-c9", "ovs_interfaceid": "097ab3f0-c9c4-48be-9bcb-78b1068e5192", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.775630] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.775773] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.775853] env[68674]: DEBUG nova.network.neutron [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.960898] env[68674]: DEBUG 
oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.995023] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240694, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.103734] env[68674]: DEBUG nova.scheduler.client.report [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.218705] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240696, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.255177] env[68674]: DEBUG oslo_concurrency.lockutils [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] Releasing lock "refresh_cache-6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.255486] env[68674]: DEBUG nova.compute.manager [req-1618a169-807c-4394-a81a-972d9b393735 req-cae5065d-61bf-46be-ae02-11843df61bd7 service nova] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Received event network-vif-deleted-cc6fc3d6-ffdc-41b7-a019-9c9523f037b0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.325921] env[68674]: DEBUG nova.network.neutron [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1045.399862] env[68674]: DEBUG nova.compute.manager [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-vif-plugged-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.399912] env[68674]: DEBUG oslo_concurrency.lockutils [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.400198] env[68674]: DEBUG oslo_concurrency.lockutils [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.400286] env[68674]: DEBUG oslo_concurrency.lockutils [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.400455] env[68674]: DEBUG nova.compute.manager [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] No waiting events found dispatching network-vif-plugged-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.400622] env[68674]: WARNING nova.compute.manager [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received unexpected event network-vif-plugged-b3b3ebd7-0f64-4a86-b249-876c5962725c for instance with vm_state building and task_state spawning. [ 1045.400776] env[68674]: DEBUG nova.compute.manager [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.400929] env[68674]: DEBUG nova.compute.manager [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing instance network info cache due to event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1045.401108] env[68674]: DEBUG oslo_concurrency.lockutils [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.495903] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240694, 'name': CreateVM_Task, 'duration_secs': 1.204652} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.496228] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.496804] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.496974] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.497301] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.497555] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a1e875-912f-4f05-9204-88ef5dfab7dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.502335] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1045.502335] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5281d754-edd7-14db-cb23-ded55e39daea" [ 1045.502335] env[68674]: _type = "Task" [ 1045.502335] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.512930] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5281d754-edd7-14db-cb23-ded55e39daea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.571194] env[68674]: DEBUG nova.network.neutron [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.608663] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.611838] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.859s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.611838] env[68674]: DEBUG nova.objects.instance [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'resources' on Instance uuid 63d6c185-db2c-4ede-a716-9a0dd432ab1f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.632882] env[68674]: INFO nova.scheduler.client.report [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Deleted allocations for instance fa8c58b7-a462-437f-b1ed-57fef6aa3903 [ 1045.721420] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240696, 'name': ReconfigVM_Task, 'duration_secs': 0.516339} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.721716] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.724783] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7aff66c8-e571-463a-9799-07e1e9a6d0a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.734110] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1045.734110] env[68674]: value = "task-3240697" [ 1045.734110] env[68674]: _type = "Task" [ 1045.734110] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.742216] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240697, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.017074] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5281d754-edd7-14db-cb23-ded55e39daea, 'name': SearchDatastore_Task, 'duration_secs': 0.009676} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.017074] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.018771] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.018771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.018771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.018771] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.018771] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4611886f-2a91-425b-a963-94e08e035085 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.033554] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.033554] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.033726] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7054d608-2b83-481f-b271-b4a0dbb302b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.040247] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1046.040247] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dd8a18-4c50-53dd-02e3-fe9b3d563c33" [ 1046.040247] env[68674]: _type = "Task" [ 1046.040247] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.051901] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52dd8a18-4c50-53dd-02e3-fe9b3d563c33, 'name': SearchDatastore_Task, 'duration_secs': 0.008782} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.052702] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ae9eb31-c921-4363-a465-aab2e60f66eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.057932] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1046.057932] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aed892-a4eb-c3c7-d7bd-54fd037ff9fc" [ 1046.057932] env[68674]: _type = "Task" [ 1046.057932] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.066072] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aed892-a4eb-c3c7-d7bd-54fd037ff9fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.077387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.077626] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Instance network_info: |[{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1046.078025] env[68674]: DEBUG oslo_concurrency.lockutils [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.078118] env[68674]: DEBUG nova.network.neutron [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1046.079398] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:a0:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4fe416-47a6-4542-b59d-8c71ab4d6503', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3b3ebd7-0f64-4a86-b249-876c5962725c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.087682] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 
tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1046.088287] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1046.088520] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1bebce2-5a32-45ec-bbe3-529c022f8229 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.109763] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.109763] env[68674]: value = "task-3240698" [ 1046.109763] env[68674]: _type = "Task" [ 1046.109763] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.120911] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240698, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.142043] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c036620-cbff-4808-975e-1c2460e8e6f1 tempest-ServersV294TestFqdnHostnames-1083566170 tempest-ServersV294TestFqdnHostnames-1083566170-project-member] Lock "fa8c58b7-a462-437f-b1ed-57fef6aa3903" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.961s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.247531] env[68674]: DEBUG oslo_vmware.api [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240697, 'name': PowerOnVM_Task, 'duration_secs': 0.437686} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.247531] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.248423] env[68674]: DEBUG nova.compute.manager [None req-b651a01a-4efe-49c9-86b2-ee4edb024fea tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1046.249235] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920be069-43d4-40bd-b484-44cd1ef24891 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.386050] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540962f1-8769-4acb-9595-1f1bd9997135 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.394659] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4fdd88-7dc4-45ed-bb08-7df74d0988bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.426322] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea20db3-4802-40ef-858f-72b2e2951985 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.434231] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7240fa78-d827-47fa-8df2-7211f308d36e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.448993] env[68674]: DEBUG nova.compute.provider_tree [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.572586] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52aed892-a4eb-c3c7-d7bd-54fd037ff9fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010185} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.573056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.573957] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc/6dc530e4-fb03-45dc-8d70-9f0e8731dfdc.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.573957] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcb8aa36-2a84-4bee-af0c-50582638d235 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.580490] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1046.580490] env[68674]: value = "task-3240699" [ 1046.580490] env[68674]: _type = "Task" [ 1046.580490] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.592164] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.620798] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240698, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.861829] env[68674]: DEBUG nova.network.neutron [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updated VIF entry in instance network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.861829] env[68674]: DEBUG nova.network.neutron [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.955037] env[68674]: DEBUG nova.scheduler.client.report [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.097050] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48105} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.097050] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc/6dc530e4-fb03-45dc-8d70-9f0e8731dfdc.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.097050] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.097050] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06b9483c-00df-48bb-bed5-345fd29455b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.105169] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1047.105169] env[68674]: value = "task-3240700" [ 1047.105169] env[68674]: _type = "Task" [ 1047.105169] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.111579] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240700, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.119341] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240698, 'name': CreateVM_Task, 'duration_secs': 0.569529} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.119525] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1047.120215] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.120380] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.120691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1047.120970] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4256d70f-9b4d-4942-b65d-7b948b6e5bd6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.125487] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1047.125487] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0417d-00fd-d611-6836-d18e5fff7149" [ 1047.125487] env[68674]: _type = "Task" [ 1047.125487] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.133519] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0417d-00fd-d611-6836-d18e5fff7149, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.363787] env[68674]: DEBUG oslo_concurrency.lockutils [req-179280a4-d26c-4796-9e4d-288c8026e0da req-c27a5b21-24cc-4b02-870a-646f49cd2867 service nova] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.463355] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.466553] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.068s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.490570] env[68674]: INFO nova.scheduler.client.report [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted allocations for instance 63d6c185-db2c-4ede-a716-9a0dd432ab1f [ 1047.614798] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240700, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082629} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.615102] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.615912] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c793bcc7-58b9-49e4-9d28-87a13c4d893b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.641840] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc/6dc530e4-fb03-45dc-8d70-9f0e8731dfdc.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.645564] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f577e45a-57fd-4d8c-b6ba-6b5a133117f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.668995] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b0417d-00fd-d611-6836-d18e5fff7149, 'name': SearchDatastore_Task, 'duration_secs': 0.009868} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.670790] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.671527] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.672383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.672383] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.672383] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.672993] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1047.672993] env[68674]: value = "task-3240701" [ 1047.672993] env[68674]: _type = "Task" [ 1047.672993] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.673520] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95a08b4c-89e4-4cea-b14a-16641b3a17ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.684649] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240701, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.685935] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.686149] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.686886] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89300054-307f-418d-ab69-94f6217474e2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.692763] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1047.692763] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1646d-0b97-c16d-d7c7-974c58d68039" [ 1047.692763] env[68674]: _type = "Task" [ 1047.692763] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.701264] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1646d-0b97-c16d-d7c7-974c58d68039, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.971741] env[68674]: INFO nova.compute.claims [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1048.000921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99ee7910-83f1-4c76-8a41-916861474c49 tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "63d6c185-db2c-4ede-a716-9a0dd432ab1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.809s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.062054] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "4214e971-ca72-4c9f-a355-78e5ad8d8219" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.062307] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.186594] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240701, 'name': ReconfigVM_Task, 'duration_secs': 0.300371} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.187640] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc/6dc530e4-fb03-45dc-8d70-9f0e8731dfdc.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.187640] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2080fff2-5bb8-4a99-8069-c9d98cb1af7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.198341] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1048.198341] env[68674]: value = "task-3240702" [ 1048.198341] env[68674]: _type = "Task" [ 1048.198341] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.206112] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d1646d-0b97-c16d-d7c7-974c58d68039, 'name': SearchDatastore_Task, 'duration_secs': 0.009627} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.206910] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cc7e036-2832-4129-a8e0-14223b4858e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.214136] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240702, 'name': Rename_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.217886] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1048.217886] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b56105-4dee-06a5-1f6b-9b684f2d6a50" [ 1048.217886] env[68674]: _type = "Task" [ 1048.217886] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.227774] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b56105-4dee-06a5-1f6b-9b684f2d6a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.478419] env[68674]: INFO nova.compute.resource_tracker [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating resource usage from migration 0f1a5cb9-1589-4d4f-8424-b56d0d078357 [ 1048.564133] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1048.709324] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240702, 'name': Rename_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.733804] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b56105-4dee-06a5-1f6b-9b684f2d6a50, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.734194] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.734463] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] bd3ae195-6e01-49d5-9fcf-9520273d9108/bd3ae195-6e01-49d5-9fcf-9520273d9108.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1048.734725] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03daacf0-f382-48bc-96fd-053417b28f49 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.740512] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436c7011-df6b-4238-8835-81808ddd4ae3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.744132] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1048.744132] env[68674]: value = "task-3240703" [ 1048.744132] env[68674]: _type = "Task" [ 1048.744132] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.753089] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b7f015-5c9a-4404-bde6-d92b4957330d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.758081] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240703, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.790424] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b041f015-3dce-4f6d-a817-6f1c0b3d6c97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.797334] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77e2ba0-0cac-48ee-99e9-7e1fe1496bf2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.811524] env[68674]: DEBUG nova.compute.provider_tree [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.085512] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.209402] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240702, 'name': Rename_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.256449] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469101} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.256744] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] bd3ae195-6e01-49d5-9fcf-9520273d9108/bd3ae195-6e01-49d5-9fcf-9520273d9108.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.256960] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.257227] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca32fb92-29ac-46cc-80d7-4acc619177ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.263399] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1049.263399] env[68674]: value = "task-3240704" [ 1049.263399] env[68674]: _type = "Task" [ 1049.263399] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.270359] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240704, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.314750] env[68674]: DEBUG nova.scheduler.client.report [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.710610] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240702, 'name': Rename_Task, 'duration_secs': 1.163782} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.710975] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.711238] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-373d9f0b-3217-432b-8e21-c8f82b328310 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.717752] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1049.717752] env[68674]: value = "task-3240705" [ 1049.717752] env[68674]: _type = "Task" [ 1049.717752] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.726676] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240705, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.772578] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240704, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124614} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.772852] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.773650] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85f6431-0a3f-482f-b0d2-5c27fa52181a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.796473] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] bd3ae195-6e01-49d5-9fcf-9520273d9108/bd3ae195-6e01-49d5-9fcf-9520273d9108.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.797384] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42793787-65fe-4e15-9a24-83ac1beefb9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.816984] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1049.816984] env[68674]: value = "task-3240706" [ 1049.816984] env[68674]: _type = "Task" [ 1049.816984] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.820948] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.355s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.821113] env[68674]: INFO nova.compute.manager [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Migrating [ 1049.827454] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.074s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.827698] env[68674]: DEBUG nova.objects.instance [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'resources' on Instance uuid 2d02adff-9fbf-4889-99e4-4efde5a51b33 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.840050] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.948016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.948016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.228903] env[68674]: DEBUG oslo_vmware.api [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240705, 'name': PowerOnVM_Task, 'duration_secs': 0.436743} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.228903] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.228903] env[68674]: INFO nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Took 8.79 seconds to spawn the instance on the hypervisor. [ 1050.229140] env[68674]: DEBUG nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.229881] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b311075a-3f2b-4765-a0d9-18019dcf8557 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.327076] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.340401] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.340542] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.340746] env[68674]: DEBUG nova.network.neutron [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.449602] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1050.577178] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb420578-107f-414a-8415-472870b9d0aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.585369] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584a1480-bd07-4ea3-876d-fa699f9ac651 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.617652] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16205c8b-bde3-433f-b779-1bf410a196cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.624048] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb1afea-011a-4636-ac70-fdb8d5a0932c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.637722] env[68674]: DEBUG nova.compute.provider_tree [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.750959] env[68674]: INFO nova.compute.manager [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Took 19.94 seconds to build instance. [ 1050.830941] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240706, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.972408] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.998562] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.998828] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.999017] env[68674]: DEBUG nova.compute.manager [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.999889] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e902a6-e00e-47d0-af23-95882ace3778 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.007737] env[68674]: DEBUG nova.compute.manager [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1051.008744] env[68674]: DEBUG nova.objects.instance [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'flavor' on Instance uuid 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.076065] env[68674]: DEBUG nova.network.neutron [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.141289] env[68674]: DEBUG nova.scheduler.client.report [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.253279] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2f45b9f8-dd13-46be-9302-ecd1be9f8020 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.454s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.328093] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240706, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.515086] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.515349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.516823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.516823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.516823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.517658] env[68674]: INFO nova.compute.manager [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Terminating instance [ 1051.578395] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.647066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.649370] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.911s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.649608] env[68674]: DEBUG nova.objects.instance [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid 8740a794-a772-4260-aeb1-51762a586fe2 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.669494] env[68674]: INFO nova.scheduler.client.report [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted allocations for instance 2d02adff-9fbf-4889-99e4-4efde5a51b33 [ 1051.812203] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.812506] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.828626] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240706, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.016426] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.016771] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fef572de-2105-4fbc-9c77-c6ec8c8e0af3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.021400] env[68674]: DEBUG nova.compute.manager [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1052.021608] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.022433] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92892af-296c-45ce-884e-17206841f493 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.026229] env[68674]: DEBUG oslo_vmware.api [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1052.026229] env[68674]: value = "task-3240707" [ 1052.026229] env[68674]: _type = "Task" [ 1052.026229] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.032089] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.032634] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-030ac171-fde7-4ea4-81a0-bbf4b18a5e0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.037016] env[68674]: DEBUG oslo_vmware.api [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.040970] env[68674]: DEBUG oslo_vmware.api [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1052.040970] env[68674]: value = "task-3240708" [ 1052.040970] env[68674]: _type = "Task" [ 1052.040970] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.053877] env[68674]: DEBUG oslo_vmware.api [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240708, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.180197] env[68674]: DEBUG oslo_concurrency.lockutils [None req-87974eb2-8bea-4e8e-8851-244bff13f2b2 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "2d02adff-9fbf-4889-99e4-4efde5a51b33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.380s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.324882] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.325145] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.325750] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.325898] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.326064] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.326583] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.326734] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1052.326893] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.331193] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240706, 'name': ReconfigVM_Task, 'duration_secs': 2.13663} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.334100] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Reconfigured VM instance instance-00000067 to attach disk [datastore1] bd3ae195-6e01-49d5-9fcf-9520273d9108/bd3ae195-6e01-49d5-9fcf-9520273d9108.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1052.334921] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29924cd6-f6ed-4601-8347-ceb901ebff69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.341458] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1052.341458] env[68674]: value = "task-3240709" [ 1052.341458] env[68674]: _type = "Task" [ 1052.341458] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.352854] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240709, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.406867] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9e1df2-7804-47ec-87f2-d5e798d2eb0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.414103] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5979ba-60e1-467d-a5a6-536c93860c91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.443489] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f46f99-6761-4759-b3a6-caa47ea23f6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.451320] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf12b05-45d5-438a-bddb-6a085d543bf8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.464774] env[68674]: DEBUG nova.compute.provider_tree [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.538666] env[68674]: DEBUG oslo_vmware.api [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240707, 'name': PowerOffVM_Task, 'duration_secs': 0.230025} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.539069] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.539385] env[68674]: DEBUG nova.compute.manager [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.540295] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115c800e-5754-46ce-83ce-59af45361dd4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.554094] env[68674]: DEBUG oslo_vmware.api [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240708, 'name': PowerOffVM_Task, 'duration_secs': 0.22138} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.554619] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.554786] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.555433] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-174c64c8-a220-4520-8b71-525a8af9599d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.620951] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.621206] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.621395] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleting the datastore file [datastore1] 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1052.621645] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2eb08e26-e13f-4521-9479-45225e3348d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.628662] env[68674]: DEBUG oslo_vmware.api [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1052.628662] env[68674]: value = "task-3240711" [ 1052.628662] env[68674]: _type = "Task" [ 1052.628662] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.636829] env[68674]: DEBUG oslo_vmware.api [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.833153] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.851941] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240709, 'name': Rename_Task, 'duration_secs': 0.149722} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.852089] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.852362] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8df1bb9-16f1-4c6a-bc07-55e6abc27e38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.860878] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1052.860878] env[68674]: value = "task-3240712" [ 1052.860878] env[68674]: _type = "Task" [ 1052.860878] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.867361] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240712, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.967576] env[68674]: DEBUG nova.scheduler.client.report [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1053.058376] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fa90d393-15d7-4251-9835-29ee950f0743 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.095403] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f040cfba-e97f-452f-b131-5ca31b19beae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.117032] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1053.138279] env[68674]: DEBUG oslo_vmware.api [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15241} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.138548] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.138734] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.138913] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.139106] env[68674]: INFO nova.compute.manager [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1053.139369] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.139566] env[68674]: DEBUG nova.compute.manager [-] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1053.139662] env[68674]: DEBUG nova.network.neutron [-] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1053.379514] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240712, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.397710] env[68674]: DEBUG nova.objects.instance [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'flavor' on Instance uuid 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.449271] env[68674]: DEBUG nova.compute.manager [req-e4af796b-d828-4fd8-bb22-c72ab69a0b02 req-996e5420-a04b-4d46-a806-caa40261fdd1 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Received event network-vif-deleted-097ab3f0-c9c4-48be-9bcb-78b1068e5192 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.450014] env[68674]: INFO nova.compute.manager [req-e4af796b-d828-4fd8-bb22-c72ab69a0b02 req-996e5420-a04b-4d46-a806-caa40261fdd1 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Neutron deleted interface 097ab3f0-c9c4-48be-9bcb-78b1068e5192; detaching it from the instance and deleting it from the info cache [ 1053.450014] env[68674]: DEBUG nova.network.neutron [req-e4af796b-d828-4fd8-bb22-c72ab69a0b02 req-996e5420-a04b-4d46-a806-caa40261fdd1 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.472802] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.476192] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.515s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.477682] env[68674]: INFO nova.compute.claims [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.508751] env[68674]: INFO nova.scheduler.client.report [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance 8740a794-a772-4260-aeb1-51762a586fe2 [ 1053.623452] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1053.623777] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ebd4801-c893-41e2-8796-bbe1378ffe5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.631065] 
env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1053.631065] env[68674]: value = "task-3240713" [ 1053.631065] env[68674]: _type = "Task" [ 1053.631065] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.640646] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240713, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.869084] env[68674]: DEBUG oslo_vmware.api [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240712, 'name': PowerOnVM_Task, 'duration_secs': 0.85457} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.869366] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.869581] env[68674]: INFO nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Took 10.03 seconds to spawn the instance on the hypervisor. 
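The entries above and below all follow the same oslo.vmware pattern: a vCenter "*_Task" SOAP method is invoked (PowerOnVM_Task, PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task), the call returns a Task managed-object reference, and wait_for_task()/_poll_task then polls its TaskInfo, logging "progress is N%" until the task reports success. A minimal illustrative sketch of that pattern, assuming oslo.vmware's public VMwareAPISession API; the host, credentials and moref value below are placeholders, not taken from this log:

from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder vCenter endpoint and credentials.
session = api.VMwareAPISession('vc1.example.com', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Managed-object reference of the target VM (placeholder value); Nova
# resolves real morefs through its vm_util lookups.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Invoking a *_Task SOAP method returns immediately with a Task moref...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ...and wait_for_task() polls the TaskInfo (producing the "progress is N%"
# lines seen in this log) until completion, raising an oslo_vmware exception
# if the task errors out.
task_info = session.wait_for_task(task)
print(task_info.state)   # 'success'

The same invoke-then-wait loop accounts for the PowerOffVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task entries later in this section; only the SOAP method name and its arguments change.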
[ 1053.869760] env[68674]: DEBUG nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.870846] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f136fb7-00a5-40f8-96ea-52d805913f6b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.901970] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.902211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.902408] env[68674]: DEBUG nova.network.neutron [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1053.902590] env[68674]: DEBUG nova.objects.instance [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'info_cache' on Instance uuid 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.907265] env[68674]: DEBUG nova.network.neutron [-] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.952029] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-757c6a22-1467-404a-9bfe-1cd483f2b968 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.961596] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20cd527-a087-4343-a731-f492974e664e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.995776] env[68674]: DEBUG nova.compute.manager [req-e4af796b-d828-4fd8-bb22-c72ab69a0b02 req-996e5420-a04b-4d46-a806-caa40261fdd1 service nova] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Detach interface failed, port_id=097ab3f0-c9c4-48be-9bcb-78b1068e5192, reason: Instance 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1054.020021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d14c0226-cd27-450f-bfb5-6a944e653586 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8740a794-a772-4260-aeb1-51762a586fe2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.067s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.146850] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240713, 'name': PowerOffVM_Task, 'duration_secs': 0.243268} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.148309] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1054.148499] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1054.156390] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.156615] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.233125] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12c131d-e137-410d-a3f7-d1e7a33fe3fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.240598] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88431797-88f4-423d-9de9-36f9ead78df0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.271277] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3294c9b4-aff9-4d41-a6e8-2f29f9def65c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.281855] env[68674]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f157fb-7377-4743-a0d7-01955413afb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.296353] env[68674]: DEBUG nova.compute.provider_tree [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.389673] env[68674]: INFO nova.compute.manager [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Took 21.72 seconds to build instance. [ 1054.406285] env[68674]: DEBUG nova.objects.base [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Object Instance<30731a3c-34ba-40c8-9b8f-2d867eff4f21> lazy-loaded attributes: flavor,info_cache {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1054.409281] env[68674]: INFO nova.compute.manager [-] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Took 1.27 seconds to deallocate network for instance. [ 1054.658812] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1054.659089] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.659255] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1054.659441] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.659591] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1054.659744] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1054.659952] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1054.660132] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1054.660306] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1054.660474] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1054.660647] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1054.665870] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1054.668444] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95e38cb5-f240-4a64-8485-d091f09964bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.685407] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1054.685407] env[68674]: value = "task-3240714" [ 1054.685407] env[68674]: _type = "Task" [ 1054.685407] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.694195] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240714, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.777545] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.777807] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.778414] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.778749] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.778818] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.781019] env[68674]: INFO nova.compute.manager [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Terminating instance [ 1054.799634] env[68674]: DEBUG nova.scheduler.client.report [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.891963] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f58c5c20-cad4-4bd5-b25f-ddb1b499b555 tempest-AttachInterfacesTestJSON-927775456 
tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.232s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.916061] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.144597] env[68674]: DEBUG nova.network.neutron [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.195947] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240714, 'name': ReconfigVM_Task, 'duration_secs': 0.451733} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.197181] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.203568] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.288075] env[68674]: DEBUG nova.compute.manager [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.288075] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.288075] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6617bf5f-738a-41a8-bb37-091c283859a6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.298086] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.299096] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfec7568-90d3-4928-927a-9ac0ad390b93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.304621] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.829s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.305475] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1055.310871] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.226s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.313090] env[68674]: INFO nova.compute.claims [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.316731] env[68674]: DEBUG oslo_vmware.api [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1055.316731] env[68674]: value = "task-3240715" [ 1055.316731] env[68674]: _type = "Task" [ 1055.316731] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.325559] env[68674]: DEBUG oslo_vmware.api [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240715, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.335419] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.335950] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.336328] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.336640] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.336937] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.339391] env[68674]: INFO nova.compute.manager [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Terminating instance [ 1055.526274] env[68674]: DEBUG nova.compute.manager [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-changed-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1055.526550] env[68674]: DEBUG nova.compute.manager [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing instance network info cache due to event network-changed-270836ed-f229-45ed-b23b-58f26fa997be. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1055.526780] env[68674]: DEBUG oslo_concurrency.lockutils [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.527658] env[68674]: DEBUG oslo_concurrency.lockutils [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.527892] env[68674]: DEBUG nova.network.neutron [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1055.647438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.705647] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1055.705647] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.705647] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1055.705647] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.705647] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1055.705842] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1055.705984] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1055.706165] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1055.706343] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1055.706499] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1055.706674] env[68674]: DEBUG nova.virt.hardware [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 
tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1055.712426] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1055.712704] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd071135-378b-4dcd-a822-6c5164162f52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.731494] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1055.731494] env[68674]: value = "task-3240716" [ 1055.731494] env[68674]: _type = "Task" [ 1055.731494] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.739490] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240716, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.822146] env[68674]: DEBUG nova.compute.utils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.823867] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1055.824192] env[68674]: DEBUG nova.network.neutron [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.836874] env[68674]: DEBUG oslo_vmware.api [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240715, 'name': PowerOffVM_Task, 'duration_secs': 0.328419} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.837172] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.837356] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.837618] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edcd9cc7-fcf5-47e9-bd34-21827fe7c01e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.843516] env[68674]: DEBUG nova.compute.manager [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.843728] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.844539] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884f4ef5-c937-4759-a7c8-f1c59d2fe606 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.851796] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.852043] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f36fd70-ebe0-49ec-adc5-5f9d1b49a177 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.857735] env[68674]: DEBUG oslo_vmware.api [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1055.857735] env[68674]: value = "task-3240718" [ 1055.857735] env[68674]: _type = "Task" [ 1055.857735] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.866574] env[68674]: DEBUG oslo_vmware.api [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240718, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.872209] env[68674]: DEBUG nova.policy [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31f9bfd8c9034fbb89ea9a1883b3a64e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd2b4690968b4f54a87770edc8628c75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1055.912190] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.912468] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.912509] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore1] 8d810cc0-3f85-49c9-9d7d-8e1711a97015 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.914191] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18df4b7d-92e0-4116-b720-074f8dc7f7d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.922379] env[68674]: DEBUG oslo_vmware.api [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1055.922379] env[68674]: value = "task-3240719" [ 1055.922379] env[68674]: _type = "Task" [ 1055.922379] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.939973] env[68674]: DEBUG oslo_vmware.api [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240719, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.242477] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240716, 'name': ReconfigVM_Task, 'duration_secs': 0.216161} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.245032] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1056.245869] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68af608d-4e34-4873-84a4-ecfa591cf84d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.271415] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1056.275020] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d36ec714-b9eb-43c5-8c84-b9a82d9bcc04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.289873] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1056.289873] env[68674]: value = "task-3240720" [ 1056.289873] env[68674]: _type = "Task" [ 1056.289873] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.298329] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240720, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.330630] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1056.371872] env[68674]: DEBUG oslo_vmware.api [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240718, 'name': PowerOffVM_Task, 'duration_secs': 0.259123} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.375827] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.376029] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.376827] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5d912f9-921f-4cf0-868a-7dd2a4209722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.435656] env[68674]: DEBUG oslo_vmware.api [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131456} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.435932] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.436132] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.436313] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.436494] env[68674]: INFO nova.compute.manager [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1056.436739] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.436946] env[68674]: DEBUG nova.compute.manager [-] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.437052] env[68674]: DEBUG nova.network.neutron [-] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.439855] env[68674]: DEBUG nova.network.neutron [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Successfully created port: 67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.455383] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.455795] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.455998] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Deleting the datastore file [datastore1] f6f5fb73-521a-4c83-93ea-a1eb2af2e142 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.456309] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2000dd28-b18e-47dd-9a69-69df27b7683f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.465793] env[68674]: DEBUG oslo_vmware.api [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for the task: (returnval){ [ 1056.465793] env[68674]: value = "task-3240722" [ 1056.465793] env[68674]: _type = "Task" [ 1056.465793] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.473857] env[68674]: DEBUG oslo_vmware.api [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: {'id': task-3240722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.516753] env[68674]: DEBUG nova.network.neutron [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updated VIF entry in instance network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1056.517160] env[68674]: DEBUG nova.network.neutron [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.637791] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f78c2e-d645-4f64-929e-168bb6c12d7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.646098] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6958bb-a936-488e-b908-279e2d305345 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.677098] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1056.678059] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89565971-55ec-4c60-a00a-c1f8059b6623 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.680327] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4013bfd7-07ae-4fc9-ac9d-62b6b479a740 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.697789] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6320ecb6-f37c-41f1-99f8-8cc74665dd03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.701945] env[68674]: DEBUG oslo_vmware.api [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 
1056.701945] env[68674]: value = "task-3240723" [ 1056.701945] env[68674]: _type = "Task" [ 1056.701945] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.713516] env[68674]: DEBUG nova.compute.provider_tree [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.721599] env[68674]: DEBUG oslo_vmware.api [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240723, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.802344] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240720, 'name': ReconfigVM_Task, 'duration_secs': 0.378744} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.802836] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1056.805991] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1056.904218] env[68674]: DEBUG nova.compute.manager [req-5ce38e11-debd-4f13-acb6-747e6b6ace14 req-c0311f8c-d0df-45e1-ae41-06b93040ed8e service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Received event network-vif-deleted-af3ba195-ad22-4557-b100-2d5c3fc527b2 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.904436] env[68674]: INFO nova.compute.manager [req-5ce38e11-debd-4f13-acb6-747e6b6ace14 req-c0311f8c-d0df-45e1-ae41-06b93040ed8e service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Neutron deleted interface af3ba195-ad22-4557-b100-2d5c3fc527b2; detaching it from the instance and deleting it from the info cache [ 1056.904613] env[68674]: DEBUG nova.network.neutron [req-5ce38e11-debd-4f13-acb6-747e6b6ace14 req-c0311f8c-d0df-45e1-ae41-06b93040ed8e service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.979804] env[68674]: DEBUG oslo_vmware.api [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Task: 
{'id': task-3240722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196807} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.980301] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.981234] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.981234] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.981234] env[68674]: INFO nova.compute.manager [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1056.981605] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.982363] env[68674]: DEBUG nova.compute.manager [-] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.982405] env[68674]: DEBUG nova.network.neutron [-] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.019949] env[68674]: DEBUG oslo_concurrency.lockutils [req-cdcde969-f99d-4e61-841a-4971e9af9713 req-9559a65d-4c66-43be-9b48-33859e20fefe service nova] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.212143] env[68674]: DEBUG oslo_vmware.api [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240723, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.217215] env[68674]: DEBUG nova.scheduler.client.report [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.314202] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cb3932-e076-4228-9fb8-59d8017001f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.337779] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f458dc4e-0815-4d7a-ad41-e28a58505963 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.341606] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1057.361150] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1057.374123] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1057.374432] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.374613] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1057.374804] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.374956] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1057.375121] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1057.375333] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1057.375498] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1057.375671] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1057.375977] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1057.376254] env[68674]: DEBUG nova.virt.hardware [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1057.377142] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10470096-de75-4e01-b65e-14fcc4c1caca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.384077] env[68674]: DEBUG nova.network.neutron [-] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Updating instance_info_cache with network_info: [] 
{{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.386370] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc5e20c-a986-4e89-8b0e-7d731f421547 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.407369] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb7300d1-52a6-4fa4-bc1a-ef3291fca01d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.419110] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c977498d-2cac-49da-8698-2950258ac406 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.454436] env[68674]: DEBUG nova.compute.manager [req-5ce38e11-debd-4f13-acb6-747e6b6ace14 req-c0311f8c-d0df-45e1-ae41-06b93040ed8e service nova] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Detach interface failed, port_id=af3ba195-ad22-4557-b100-2d5c3fc527b2, reason: Instance 8d810cc0-3f85-49c9-9d7d-8e1711a97015 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1057.555432] env[68674]: DEBUG nova.compute.manager [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1057.555432] env[68674]: DEBUG nova.compute.manager [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing instance network info cache due to event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1057.555599] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.555759] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.555906] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1057.712612] env[68674]: DEBUG oslo_vmware.api [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240723, 'name': PowerOnVM_Task, 'duration_secs': 0.798058} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.712885] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.713511] env[68674]: DEBUG nova.compute.manager [None req-1d3e8999-be7b-4a8a-badf-94ce802dc23a tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.713963] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2338cd70-64f4-4406-950f-1760d52db9f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.725989] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.725989] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1057.725989] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.754s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.727451] env[68674]: INFO nova.compute.claims [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.893260] env[68674]: INFO nova.compute.manager [-] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Took 1.46 seconds to deallocate network for instance. 
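The ReconfigVM_Task and PowerOnVM_Task entries above all follow the same shape: the driver invokes a vCenter task, then wait_for_task polls it, logging "progress is N%" until _poll_task reports it completed successfully. Below is only a minimal, self-contained Python sketch of that polling pattern; FakeTask and its dict fields are hypothetical stand-ins, not the oslo_vmware API, whose real implementation lives in oslo_vmware/api.py as the log paths show.

import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle such as 'task-3240723'."""

    def __init__(self, polls_until_done=3):
        self._polls_until_done = polls_until_done
        self._polls = 0

    def info(self):
        # Each call advances the fake task; the real driver instead asks the
        # PropertyCollector for the task's info.state and info.progress.
        self._polls += 1
        if self._polls >= self._polls_until_done:
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": 33 * self._polls}

def wait_for_task(task, poll_interval=0.5):
    """Poll until the task reaches a terminal state, echoing the
    'progress is N%' / 'completed successfully' lines seen in the log."""
    while True:
        info = task.info()
        if info["state"] == "success":
            print("completed successfully")
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed")
        print("progress is %d%%" % info["progress"])
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask(), poll_interval=0.01)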
[ 1058.147878] env[68674]: DEBUG nova.compute.manager [req-f12f71b9-fc36-4fe7-b71c-d83fb415f63f req-efe604c3-b024-4562-9589-e5af2acb25b9 service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Received event network-vif-plugged-67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.148251] env[68674]: DEBUG oslo_concurrency.lockutils [req-f12f71b9-fc36-4fe7-b71c-d83fb415f63f req-efe604c3-b024-4562-9589-e5af2acb25b9 service nova] Acquiring lock "cbccde73-b903-47f7-9cbc-f0b376a03435-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.148905] env[68674]: DEBUG oslo_concurrency.lockutils [req-f12f71b9-fc36-4fe7-b71c-d83fb415f63f req-efe604c3-b024-4562-9589-e5af2acb25b9 service nova] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.149174] env[68674]: DEBUG oslo_concurrency.lockutils [req-f12f71b9-fc36-4fe7-b71c-d83fb415f63f req-efe604c3-b024-4562-9589-e5af2acb25b9 service nova] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.149598] env[68674]: DEBUG nova.compute.manager [req-f12f71b9-fc36-4fe7-b71c-d83fb415f63f req-efe604c3-b024-4562-9589-e5af2acb25b9 service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] No waiting events found dispatching network-vif-plugged-67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1058.149598] env[68674]: WARNING nova.compute.manager [req-f12f71b9-fc36-4fe7-b71c-d83fb415f63f req-efe604c3-b024-4562-9589-e5af2acb25b9 service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Received unexpected event network-vif-plugged-67abafc0-ffa5-4032-b312-9314d5c0e03a for instance with vm_state building and task_state spawning. [ 1058.160649] env[68674]: DEBUG nova.network.neutron [-] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.231581] env[68674]: DEBUG nova.compute.utils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1058.233982] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1058.234216] env[68674]: DEBUG nova.network.neutron [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1058.296504] env[68674]: DEBUG nova.policy [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed36a9f9127b4bc7b619f45a45cf7401', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43f00e26b76347d0bd40df46ac3acbcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1058.357997] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updated VIF entry in instance network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1058.358413] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.383567] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "182deaf0-c20a-4041-8f41-81786d6b053e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.383829] env[68674]: 
DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "182deaf0-c20a-4041-8f41-81786d6b053e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.384113] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "182deaf0-c20a-4041-8f41-81786d6b053e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.384322] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "182deaf0-c20a-4041-8f41-81786d6b053e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.384492] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "182deaf0-c20a-4041-8f41-81786d6b053e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.387022] env[68674]: INFO nova.compute.manager [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Terminating instance [ 1058.401401] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.598189] env[68674]: DEBUG nova.network.neutron [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Successfully created port: d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.665809] env[68674]: INFO nova.compute.manager [-] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Took 1.68 seconds to deallocate network for instance. [ 1058.739112] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1058.766251] env[68674]: DEBUG nova.network.neutron [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Successfully updated port: 67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.861153] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.861416] env[68674]: DEBUG nova.compute.manager [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.861592] env[68674]: DEBUG nova.compute.manager [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing instance network info cache due to event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.861805] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.861954] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.862185] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.890603] env[68674]: DEBUG nova.compute.manager [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.890850] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.892492] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da6b46d-4079-4e80-9527-2dfad92e4309 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.902370] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.903568] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd0917bf-0173-4e4b-a581-85c77fd5066a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.910065] env[68674]: DEBUG oslo_vmware.api [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1058.910065] env[68674]: value = "task-3240724" [ 1058.910065] env[68674]: _type = "Task" [ 1058.910065] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.922018] env[68674]: DEBUG oslo_vmware.api [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.977365] env[68674]: DEBUG nova.compute.manager [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Received event network-vif-deleted-a90f2990-e03b-4fce-b01d-d1fe57be7c57 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.977573] env[68674]: DEBUG nova.compute.manager [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Received event network-changed-67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.977739] env[68674]: DEBUG nova.compute.manager [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Refreshing instance network info cache due to event network-changed-67abafc0-ffa5-4032-b312-9314d5c0e03a. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.977956] env[68674]: DEBUG oslo_concurrency.lockutils [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] Acquiring lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.978115] env[68674]: DEBUG oslo_concurrency.lockutils [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] Acquired lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.978278] env[68674]: DEBUG nova.network.neutron [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Refreshing network info cache for port 67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1059.027673] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21171586-9893-471a-8ce3-03806bc63dfa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.038035] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd567da-5433-406e-b6e6-080ab3f09aee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.041107] env[68674]: DEBUG nova.network.neutron [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Port 4fae8d88-2aaa-48bd-b0c4-72bc768efce3 binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1059.075739] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1c5cb6-be58-4340-a641-9607a054bf09 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.085560] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15ff125-e22b-4a22-94ee-1bd0e6c664fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.102297] env[68674]: DEBUG nova.compute.provider_tree [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.171682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.270420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b 
tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.420067] env[68674]: DEBUG oslo_vmware.api [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240724, 'name': PowerOffVM_Task, 'duration_secs': 0.261232} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.420225] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.420334] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.420584] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04661a31-9b0c-4310-94dd-d95084c071ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.491879] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.491879] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.492126] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleting the datastore file [datastore2] 182deaf0-c20a-4041-8f41-81786d6b053e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.492348] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-901036ca-786b-43ef-9bbd-0988d010f493 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.499372] env[68674]: DEBUG oslo_vmware.api [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1059.499372] env[68674]: value = "task-3240726" [ 1059.499372] env[68674]: _type = "Task" [ 1059.499372] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.507350] env[68674]: DEBUG oslo_vmware.api [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240726, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.523637] env[68674]: DEBUG nova.network.neutron [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1059.595801] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.596038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.596222] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.605256] env[68674]: DEBUG nova.scheduler.client.report [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.619519] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updated VIF entry in instance network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.619886] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.641287] env[68674]: DEBUG nova.network.neutron [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.752766] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1059.788046] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.788319] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.789029] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.789029] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.789029] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.789029] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.789221] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.789325] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.789498] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 
tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.789664] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.789839] env[68674]: DEBUG nova.virt.hardware [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.790769] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61b4876-b054-4b5c-9741-6da3c89f562b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.801373] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8714b9-3294-4d87-bcef-9876f63f9499 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.009918] env[68674]: DEBUG oslo_vmware.api [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240726, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14816} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.010213] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1060.010408] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1060.010591] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1060.010771] env[68674]: INFO nova.compute.manager [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Took 1.12 seconds to destroy the instance on the hypervisor. 
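[editor's note] The entries above ([ 1060.009918]–[ 1060.010771]) trace the teardown of instance 182deaf0-c20a-4041-8f41-81786d6b053e: a DeleteDatastoreFile_Task is polled via _poll_task until it reports completion, the datastore contents are removed, and the instance is marked destroyed before network deallocation starts below. As a minimal sketch only, the loop here illustrates the generic poll-until-terminal pattern those "progress is N%" / "completed successfully" lines reflect; get_task_info, the state names, and wait_for_task are hypothetical stand-ins, not oslo.vmware's actual API.

```python
import time

# Illustrative terminal states for a long-running backend task (assumption,
# not oslo.vmware's real state model).
TERMINAL_STATES = {"success", "error"}

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a long-running task until it reaches a terminal state.

    get_task_info: callable returning a dict such as
        {"id": task_id, "state": "running" | "success" | "error", "progress": int}
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info["state"] in TERMINAL_STATES:
            if info["state"] == "error":
                raise RuntimeError(f"task {task_id} failed")
            return info  # analogous to the "completed successfully" log line
        # Analogous to the repeated "progress is N%" polling entries in the log.
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
```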
[ 1060.011035] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1060.011239] env[68674]: DEBUG nova.compute.manager [-] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1060.011594] env[68674]: DEBUG nova.network.neutron [-] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1060.110206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.110731] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1060.117017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.282s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.117017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.117017] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1060.117017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.200s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.117017] env[68674]: DEBUG nova.objects.instance [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'resources' on Instance uuid 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.117687] 
env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9998a28-0981-4f53-98a5-69a4f6b90ed6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.121742] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.121964] env[68674]: DEBUG nova.compute.manager [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-changed-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.122156] env[68674]: DEBUG nova.compute.manager [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing instance network info cache due to event network-changed-270836ed-f229-45ed-b23b-58f26fa997be. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1060.122352] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.122490] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.122648] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1060.129476] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ed2269-874c-4c30-b3ee-26f778277dcd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.146026] env[68674]: DEBUG oslo_concurrency.lockutils [req-8fe23fb1-edd4-4d6c-a8cb-b871fc67814b req-cb2c4dd7-74df-41ce-b236-e29da017ea1d service nova] Releasing lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.146026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquired lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.146188] env[68674]: DEBUG nova.network.neutron [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: 
cbccde73-b903-47f7-9cbc-f0b376a03435] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1060.149375] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f876bbaa-fa8e-4680-919b-6b49fb9dff99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.157886] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe9bd6a-1f61-4d8b-a6fc-99c9803725aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.188399] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179390MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1060.188683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.464902] env[68674]: DEBUG nova.network.neutron [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Successfully updated port: d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.618610] env[68674]: DEBUG nova.compute.utils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1060.619630] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1060.619801] env[68674]: DEBUG nova.network.neutron [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1060.657736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.657817] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.657943] env[68674]: DEBUG nova.network.neutron [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1060.704156] env[68674]: DEBUG nova.network.neutron [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1060.712465] env[68674]: DEBUG nova.policy [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e07f15f69a9e482784c39b9c7cda8a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bca98e5a30741249b1bdee899ffe433', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1060.874444] env[68674]: DEBUG nova.network.neutron [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Updating instance_info_cache with network_info: [{"id": "67abafc0-ffa5-4032-b312-9314d5c0e03a", "address": "fa:16:3e:b0:54:1d", "network": {"id": "a123b66b-623f-49df-8ff0-67d7dfca6f57", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1805484768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2b4690968b4f54a87770edc8628c75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67abafc0-ff", "ovs_interfaceid": "67abafc0-ffa5-4032-b312-9314d5c0e03a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.885238] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc5eb1b-1e6d-40b3-9b9c-5d9a60336e5b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.895300] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d93fa52-85d1-4832-a36e-6af4a3d0d5bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.927251] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fec985-1fe8-4182-a93e-9e11dab2f346 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.937527] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3362b3-7245-4659-8f51-eb6f124c48f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.957948] env[68674]: DEBUG 
nova.compute.provider_tree [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.968653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.968797] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.968945] env[68674]: DEBUG nova.network.neutron [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.010919] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updated VIF entry in instance network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1061.011380] env[68674]: DEBUG nova.network.neutron [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.033544] env[68674]: DEBUG nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc 
req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Received event network-vif-plugged-d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.033544] env[68674]: DEBUG oslo_concurrency.lockutils [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] Acquiring lock "4214e971-ca72-4c9f-a355-78e5ad8d8219-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.033544] env[68674]: DEBUG oslo_concurrency.lockutils [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.033544] env[68674]: DEBUG oslo_concurrency.lockutils [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.033544] env[68674]: DEBUG nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] No waiting events found dispatching network-vif-plugged-d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1061.033544] env[68674]: WARNING nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Received unexpected event network-vif-plugged-d5b4166a-1c05-4ad8-a9f4-697517d72f6c for instance with vm_state building and task_state spawning. [ 1061.033544] env[68674]: DEBUG nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Received event network-changed-d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.033544] env[68674]: DEBUG nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Refreshing instance network info cache due to event network-changed-d5b4166a-1c05-4ad8-a9f4-697517d72f6c. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1061.033544] env[68674]: DEBUG oslo_concurrency.lockutils [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] Acquiring lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.130673] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1061.136318] env[68674]: DEBUG nova.network.neutron [-] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.159230] env[68674]: DEBUG nova.network.neutron [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Successfully created port: 7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1061.378251] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Releasing lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.378583] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Instance network_info: |[{"id": "67abafc0-ffa5-4032-b312-9314d5c0e03a", "address": "fa:16:3e:b0:54:1d", "network": {"id": "a123b66b-623f-49df-8ff0-67d7dfca6f57", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1805484768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2b4690968b4f54a87770edc8628c75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67abafc0-ff", "ovs_interfaceid": "67abafc0-ffa5-4032-b312-9314d5c0e03a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1061.379021] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 
tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:54:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67abafc0-ffa5-4032-b312-9314d5c0e03a', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.386920] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Creating folder: Project (cd2b4690968b4f54a87770edc8628c75). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1061.387356] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b23c73c-1676-4feb-9416-7d680091ab32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.398094] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Created folder: Project (cd2b4690968b4f54a87770edc8628c75) in parent group-v647377. [ 1061.398318] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Creating folder: Instances. Parent ref: group-v647672. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1061.398601] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-113cdd73-3a60-4872-aee0-d9692b7efafd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.410158] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Created folder: Instances in parent group-v647672. [ 1061.410318] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.410520] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.410734] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f18fad7-b253-410d-a969-ea1228567c5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.429154] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.429154] env[68674]: value = "task-3240729" [ 1061.429154] env[68674]: _type = "Task" [ 1061.429154] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.437585] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240729, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.464363] env[68674]: DEBUG nova.scheduler.client.report [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.509208] env[68674]: DEBUG nova.network.neutron [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.514229] env[68674]: DEBUG oslo_concurrency.lockutils [req-8158c86b-9f58-4a7e-88fb-5e5e0909b2a5 req-024384e8-0a9a-427c-aae6-87a88128acb3 service nova] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.553145] env[68674]: DEBUG nova.network.neutron [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.643402] env[68674]: INFO nova.compute.manager [-] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Took 1.63 seconds to 
deallocate network for instance. [ 1061.649850] env[68674]: DEBUG nova.network.neutron [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updating instance_info_cache with network_info: [{"id": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "address": "fa:16:3e:c3:1f:45", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b4166a-1c", "ovs_interfaceid": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.940026] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240729, 'name': CreateVM_Task, 'duration_secs': 0.40058} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.940189] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.940883] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.941070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.941409] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1061.941685] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1446ac56-add8-48a9-937a-3f203a63623f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1061.946161] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1061.946161] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fd469d-8684-781f-1cbb-33b29cdad8da" [ 1061.946161] env[68674]: _type = "Task" [ 1061.946161] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.953600] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fd469d-8684-781f-1cbb-33b29cdad8da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.967663] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.970527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.767s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.972051] env[68674]: INFO nova.compute.claims [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.989447] env[68674]: INFO nova.scheduler.client.report [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocations for instance 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc [ 1062.058060] env[68674]: DEBUG oslo_concurrency.lockutils [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.146042] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1062.148842] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.151357] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.151649] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Instance network_info: |[{"id": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "address": "fa:16:3e:c3:1f:45", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b4166a-1c", "ovs_interfaceid": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1062.151915] env[68674]: DEBUG oslo_concurrency.lockutils [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] Acquired lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.152147] env[68674]: DEBUG nova.network.neutron [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Refreshing network info cache for port d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.154260] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:1f:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'd5b4166a-1c05-4ad8-a9f4-697517d72f6c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.162314] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.165236] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.167410] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52491b4b-3167-42ff-be6d-8b36e29837eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.188890] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1062.188890] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.188890] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1062.188890] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.189152] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1062.189152] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 
tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1062.189353] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1062.189510] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1062.189681] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1062.189848] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1062.190020] env[68674]: DEBUG nova.virt.hardware [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1062.190992] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dca10e8-4e82-4c4c-93b3-07ceee52ca2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.196335] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.196335] env[68674]: value = "task-3240730" [ 1062.196335] env[68674]: _type = "Task" [ 1062.196335] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.203105] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0e5d73-9702-4c8a-9218-6ba9460da1c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.209499] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240730, 'name': CreateVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.384027] env[68674]: DEBUG nova.network.neutron [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updated VIF entry in instance network info cache for port d5b4166a-1c05-4ad8-a9f4-697517d72f6c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.384391] env[68674]: DEBUG nova.network.neutron [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updating instance_info_cache with network_info: [{"id": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "address": "fa:16:3e:c3:1f:45", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b4166a-1c", "ovs_interfaceid": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.456524] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52fd469d-8684-781f-1cbb-33b29cdad8da, 'name': SearchDatastore_Task, 'duration_secs': 0.009142} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.456839] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.457091] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.457349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.457529] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.457674] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.457930] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b10fa0df-37c9-48ec-b2e8-37342791004a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.466084] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.466269] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.466960] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4048c60-d762-4ba7-84bf-db2f66b3af0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.472159] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1062.472159] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b12438-8df6-cf09-4251-8fbd39c5ac55" [ 1062.472159] env[68674]: _type = "Task" [ 1062.472159] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.481877] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b12438-8df6-cf09-4251-8fbd39c5ac55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.498758] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e3f72d4c-93b8-4f33-a82d-4a90ff48fc8b tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6dc530e4-fb03-45dc-8d70-9f0e8731dfdc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.983s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.567779] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919d9b61-031c-41c4-a228-cbb0c1cdf96b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.575389] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a601f0-663a-4703-aee0-e555e0844c18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.705965] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240730, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.840614] env[68674]: DEBUG nova.network.neutron [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Successfully updated port: 7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.886665] env[68674]: DEBUG oslo_concurrency.lockutils [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] Releasing lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.887260] env[68674]: DEBUG nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Received event network-vif-deleted-4227b11c-e4da-42b5-80c0-af15c0b4de7f {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.887641] env[68674]: INFO nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Neutron deleted interface 4227b11c-e4da-42b5-80c0-af15c0b4de7f; detaching it from the instance and deleting it from the info cache [ 1062.888037] env[68674]: DEBUG nova.network.neutron [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.984648] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52b12438-8df6-cf09-4251-8fbd39c5ac55, 'name': SearchDatastore_Task, 'duration_secs': 0.008741} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.985501] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c4e3fc-a053-4b5e-98e8-195c53576f1c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.991118] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1062.991118] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525567e7-2850-ff7e-d3e4-7b45538f8fc0" [ 1062.991118] env[68674]: _type = "Task" [ 1062.991118] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.000714] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525567e7-2850-ff7e-d3e4-7b45538f8fc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.098055] env[68674]: DEBUG nova.compute.manager [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-vif-plugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.098248] env[68674]: DEBUG oslo_concurrency.lockutils [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.098455] env[68674]: DEBUG oslo_concurrency.lockutils [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.098618] env[68674]: DEBUG oslo_concurrency.lockutils [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.098815] env[68674]: DEBUG nova.compute.manager [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] No waiting events found dispatching network-vif-plugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.098958] env[68674]: WARNING nova.compute.manager [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received unexpected event network-vif-plugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 for instance with vm_state building and task_state spawning. [ 1063.099147] env[68674]: DEBUG nova.compute.manager [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.099329] env[68674]: DEBUG nova.compute.manager [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing instance network info cache due to event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1063.099634] env[68674]: DEBUG oslo_concurrency.lockutils [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.099718] env[68674]: DEBUG oslo_concurrency.lockutils [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.099817] env[68674]: DEBUG nova.network.neutron [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.119248] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-66f4ab32-ef66-4d1d-93b6-775d59ce3c41-dc748df7-150d-4b34-a259-782775725005" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.120241] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-66f4ab32-ef66-4d1d-93b6-775d59ce3c41-dc748df7-150d-4b34-a259-782775725005" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.120498] env[68674]: DEBUG nova.objects.instance [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'flavor' on Instance uuid 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.203093] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb0f6a8-68d1-4148-b673-812c67d92fb0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.213838] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a17d635-82a4-437e-877c-7bb6c57003c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.216953] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240730, 'name': CreateVM_Task, 'duration_secs': 0.557907} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.217150] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1063.218436] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.218668] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.219212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1063.219365] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-616a4df1-9a80-4bc4-84fb-ddb2ae916d83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.246973] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2992ee29-6fb1-47d7-ade1-7419bed7c7d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.251012] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1063.251012] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a487c5-ae19-95d7-bd4f-707382707415" [ 1063.251012] env[68674]: _type = "Task" [ 1063.251012] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.258124] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff09072e-5876-4d5f-bb1e-ceaf7c1828a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.265342] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a487c5-ae19-95d7-bd4f-707382707415, 'name': SearchDatastore_Task, 'duration_secs': 0.010306} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.266078] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.266288] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.266522] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.276958] env[68674]: DEBUG nova.compute.provider_tree [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.348750] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.393258] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0874716c-59d7-4297-baea-f4730106f0d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.404593] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27275762-99e4-4208-9426-8a0d53900585 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.436722] env[68674]: DEBUG nova.compute.manager [req-b259d6de-f42b-476d-a122-36c087fb30dc req-43b0baf5-00c7-43f4-8313-b2bc33d439e0 service nova] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Detach interface failed, port_id=4227b11c-e4da-42b5-80c0-af15c0b4de7f, reason: Instance 182deaf0-c20a-4041-8f41-81786d6b053e could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1063.501744] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525567e7-2850-ff7e-d3e4-7b45538f8fc0, 'name': SearchDatastore_Task, 'duration_secs': 0.009473} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.502115] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.502384] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] cbccde73-b903-47f7-9cbc-f0b376a03435/cbccde73-b903-47f7-9cbc-f0b376a03435.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1063.502674] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.502861] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.503094] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11fec661-3b8a-41ef-b60b-5a60922ea9a8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.505328] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f7996d7-8f2f-4fc6-9446-32921072ad9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.512413] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1063.512413] env[68674]: value = "task-3240731" [ 1063.512413] env[68674]: _type = "Task" [ 1063.512413] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.516710] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.516944] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.518053] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c5b3f70-ad80-4982-b870-163435544661 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.523565] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240731, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.527220] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1063.527220] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524cfa93-8152-e3e0-a959-cd0c6fb81fea" [ 1063.527220] env[68674]: _type = "Task" [ 1063.527220] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.535489] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524cfa93-8152-e3e0-a959-cd0c6fb81fea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.642302] env[68674]: DEBUG nova.network.neutron [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1063.692974] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b7c30c-6de2-4f4c-9b65-92a9ca2e2222 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.720988] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9118f2b5-17a2-416d-86c8-b05cb05d59c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.730281] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.756110] env[68674]: DEBUG nova.network.neutron [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.782026] env[68674]: DEBUG nova.scheduler.client.report [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.785697] env[68674]: DEBUG nova.objects.instance [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'pci_requests' on Instance uuid 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.023066] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240731, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.038451] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524cfa93-8152-e3e0-a959-cd0c6fb81fea, 'name': SearchDatastore_Task, 'duration_secs': 0.009371} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.039469] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-783532a9-0d0a-4a81-a45d-86b298a44b75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.045558] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1064.045558] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d0a227-9cc3-ae4d-7249-36b6fe5c70a1" [ 1064.045558] env[68674]: _type = "Task" [ 1064.045558] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.055177] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d0a227-9cc3-ae4d-7249-36b6fe5c70a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.237473] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.237751] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67d3ba7d-bc54-48a1-be9a-c40a6390e5b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.244431] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1064.244431] env[68674]: value = "task-3240732" [ 1064.244431] env[68674]: _type = "Task" [ 1064.244431] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.251756] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.251980] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.256311] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240732, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.259054] env[68674]: DEBUG oslo_concurrency.lockutils [req-d334c2f8-fddd-4f02-9f72-0af94c066ea8 req-0f956308-82cd-4420-99b4-3d0ebe1580d8 service nova] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.259406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.259565] env[68674]: DEBUG nova.network.neutron [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.288376] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.289876] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1064.292066] env[68674]: DEBUG nova.objects.base [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Object Instance<66f4ab32-ef66-4d1d-93b6-775d59ce3c41> lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1064.292066] env[68674]: DEBUG nova.network.neutron [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1064.295047] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.893s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.295047] env[68674]: DEBUG nova.objects.instance [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid 8d810cc0-3f85-49c9-9d7d-8e1711a97015 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.390244] env[68674]: DEBUG nova.policy [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.522843] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240731, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666624} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.523222] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] cbccde73-b903-47f7-9cbc-f0b376a03435/cbccde73-b903-47f7-9cbc-f0b376a03435.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1064.523372] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.523565] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d33cd625-baf1-49b2-acd3-16a29b996f62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.530180] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1064.530180] env[68674]: value = "task-3240733" [ 1064.530180] env[68674]: _type = "Task" [ 1064.530180] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.538927] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240733, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.555727] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d0a227-9cc3-ae4d-7249-36b6fe5c70a1, 'name': SearchDatastore_Task, 'duration_secs': 0.060319} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.556000] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.556323] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/4214e971-ca72-4c9f-a355-78e5ad8d8219.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.556601] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0177b7ff-b95a-4006-a59b-7c38d356454c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.562586] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1064.562586] env[68674]: value = "task-3240734" [ 1064.562586] env[68674]: _type = "Task" [ 1064.562586] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.572222] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.754212] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240732, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.757741] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1064.791818] env[68674]: DEBUG nova.network.neutron [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1064.796523] env[68674]: DEBUG nova.compute.utils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1064.798554] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1064.798711] env[68674]: DEBUG nova.network.neutron [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1064.882480] env[68674]: DEBUG nova.policy [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5080a4f68ef1482caaee5aa26614e6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c958fcb56a934ef7919b76aa2a193429', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1065.032012] env[68674]: DEBUG nova.network.neutron [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309bd2b-c0", "ovs_interfaceid": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.043119] env[68674]: DEBUG oslo_vmware.api [None 
req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113117} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.043486] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.044269] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457b108e-cc9d-464d-a65f-05cc284d7e74 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.075890] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] cbccde73-b903-47f7-9cbc-f0b376a03435/cbccde73-b903-47f7-9cbc-f0b376a03435.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.082420] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aabdc9f8-cf0d-4583-ae22-b272a5fe25a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.106673] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240734, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.108988] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1065.108988] env[68674]: value = "task-3240735" [ 1065.108988] env[68674]: _type = "Task" [ 1065.108988] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.113077] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5e1d95-5c9e-4904-94a9-d1c9fc03efff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.121207] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240735, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.123982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e903872e-420d-4f6a-bdd3-f5d80b18c35f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.156052] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5467b37-507b-44ad-8208-98d4be1eb2c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.164586] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9deddee-b843-429c-906f-0661d43b8b9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.181373] env[68674]: DEBUG nova.compute.provider_tree [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.232439] env[68674]: DEBUG nova.network.neutron [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Successfully created port: 7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1065.255831] env[68674]: DEBUG oslo_vmware.api [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240732, 'name': PowerOnVM_Task, 'duration_secs': 0.725877} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.255903] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1065.256078] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-43d922c7-a265-4daa-acf2-9fb26d512a5f tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance '2a7a6269-65a8-402c-b174-a4a46d20a33a' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.276304] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.305481] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1065.538801] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.539173] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance network_info: |[{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309bd2b-c0", "ovs_interfaceid": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1065.539612] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:5d:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721c6720-3ce0-450e-9951-a894f03acc27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7309bd2b-c077-4257-8efb-bf6e8d516ab7', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1065.547280] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.547507] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1065.547733] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5aa4999d-f77b-44df-9728-e294917a738c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.570950] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1065.570950] env[68674]: value = "task-3240736" [ 1065.570950] env[68674]: _type = "Task" [ 1065.570950] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.577433] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800295} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.578150] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/4214e971-ca72-4c9f-a355-78e5ad8d8219.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.578556] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.578845] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6be7cbd-4faa-4b41-8052-e384df656c13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.583777] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240736, 'name': CreateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.587944] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1065.587944] env[68674]: value = "task-3240737" [ 1065.587944] env[68674]: _type = "Task" [ 1065.587944] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.595301] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240737, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.619488] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240735, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.684894] env[68674]: DEBUG nova.scheduler.client.report [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.844570] env[68674]: DEBUG nova.compute.manager [req-22d69cb5-737a-44bd-9006-f7d2064be5ad req-b82497ef-621b-4550-a6f1-4b8a2bda155e service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-vif-plugged-dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1065.844927] env[68674]: DEBUG oslo_concurrency.lockutils [req-22d69cb5-737a-44bd-9006-f7d2064be5ad req-b82497ef-621b-4550-a6f1-4b8a2bda155e service nova] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.845224] env[68674]: DEBUG oslo_concurrency.lockutils [req-22d69cb5-737a-44bd-9006-f7d2064be5ad req-b82497ef-621b-4550-a6f1-4b8a2bda155e service nova] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.845520] env[68674]: DEBUG oslo_concurrency.lockutils [req-22d69cb5-737a-44bd-9006-f7d2064be5ad req-b82497ef-621b-4550-a6f1-4b8a2bda155e service nova] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.845817] env[68674]: DEBUG nova.compute.manager [req-22d69cb5-737a-44bd-9006-f7d2064be5ad req-b82497ef-621b-4550-a6f1-4b8a2bda155e service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] No waiting events found dispatching network-vif-plugged-dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1065.845998] env[68674]: WARNING nova.compute.manager [req-22d69cb5-737a-44bd-9006-f7d2064be5ad req-b82497ef-621b-4550-a6f1-4b8a2bda155e service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received unexpected event network-vif-plugged-dc748df7-150d-4b34-a259-782775725005 for instance with vm_state active and task_state None. 
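The "Acquiring lock ... by ..." / "acquired ... :: waited" / ""released" ... :: held" triplets above come from oslo.concurrency's lockutils: the waited/held lines for the per-instance "-events" lock correspond to its synchronized-style wrapper (the inner() frames at lockutils.py:405-424), while the refresh_cache-<uuid> locks seen elsewhere in this log go through the plain lock() context manager (lockutils.py:313-334). A minimal, illustrative sketch of both call patterns, assuming nothing beyond the lock names shown in the log:

from oslo_concurrency import lockutils

# Decorator form: emits "Acquiring lock ...", "acquired ... waited Ns"
# and '"released" ... held Ns' around the wrapped call.
@lockutils.synchronized('66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events')
def _pop_event():
    pass  # critical section, e.g. popping a pending instance event

# Context-manager form: emits the plain Acquiring/Acquired/Releasing lines.
with lockutils.lock('refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41'):
    pass  # critical section, e.g. rebuilding the network info cache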
[ 1065.921843] env[68674]: DEBUG nova.network.neutron [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Successfully updated port: dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1066.081122] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240736, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.095747] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092182} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.095992] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1066.096757] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c8a4d2-c44b-4437-8413-d44446997cda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.120232] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/4214e971-ca72-4c9f-a355-78e5ad8d8219.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.123255] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b14df8f5-9c4e-4722-9d50-e433555bc2fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.143729] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240735, 'name': ReconfigVM_Task, 'duration_secs': 0.832931} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.144951] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Reconfigured VM instance instance-00000068 to attach disk [datastore1] cbccde73-b903-47f7-9cbc-f0b376a03435/cbccde73-b903-47f7-9cbc-f0b376a03435.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.145691] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1066.145691] env[68674]: value = "task-3240738" [ 1066.145691] env[68674]: _type = "Task" [ 1066.145691] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.145918] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db2e4455-eb4a-40b8-b0ca-4046606333d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.155558] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240738, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.157029] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1066.157029] env[68674]: value = "task-3240739" [ 1066.157029] env[68674]: _type = "Task" [ 1066.157029] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.163997] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240739, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.190138] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.192927] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.021s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.193461] env[68674]: DEBUG nova.objects.instance [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lazy-loading 'resources' on Instance uuid f6f5fb73-521a-4c83-93ea-a1eb2af2e142 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.211929] env[68674]: INFO nova.scheduler.client.report [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance 8d810cc0-3f85-49c9-9d7d-8e1711a97015 [ 1066.316375] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1066.342217] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1066.342495] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.342654] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1066.342843] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.342995] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1066.343164] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1066.343391] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1066.343554] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1066.343725] env[68674]: DEBUG 
nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1066.343890] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1066.345035] env[68674]: DEBUG nova.virt.hardware [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1066.345035] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b53dee-8802-4aec-b203-e40e1f09c6b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.354364] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e8705e-3b46-4927-84c3-92c80bdcc51f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.425825] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.425825] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.425825] env[68674]: DEBUG nova.network.neutron [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.584510] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240736, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.621123] env[68674]: DEBUG nova.compute.manager [req-4dd4f6fb-cd18-441f-b86c-365314154d05 req-93cbd450-7479-4540-b0e0-ae89ec74bb24 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Received event network-vif-plugged-7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.621359] env[68674]: DEBUG oslo_concurrency.lockutils [req-4dd4f6fb-cd18-441f-b86c-365314154d05 req-93cbd450-7479-4540-b0e0-ae89ec74bb24 service nova] Acquiring lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.621577] env[68674]: DEBUG oslo_concurrency.lockutils [req-4dd4f6fb-cd18-441f-b86c-365314154d05 req-93cbd450-7479-4540-b0e0-ae89ec74bb24 service nova] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.621746] env[68674]: DEBUG oslo_concurrency.lockutils [req-4dd4f6fb-cd18-441f-b86c-365314154d05 req-93cbd450-7479-4540-b0e0-ae89ec74bb24 service nova] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.622376] env[68674]: DEBUG nova.compute.manager [req-4dd4f6fb-cd18-441f-b86c-365314154d05 req-93cbd450-7479-4540-b0e0-ae89ec74bb24 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] No waiting events found dispatching network-vif-plugged-7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1066.622650] env[68674]: WARNING nova.compute.manager [req-4dd4f6fb-cd18-441f-b86c-365314154d05 req-93cbd450-7479-4540-b0e0-ae89ec74bb24 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Received unexpected event network-vif-plugged-7111fb79-ad70-4af7-9c47-0e2443a51a32 for instance with vm_state building and task_state spawning. [ 1066.657977] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240738, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.666173] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240739, 'name': Rename_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.720310] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5ef93a02-8132-46fd-8110-a27efe81ac66 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "8d810cc0-3f85-49c9-9d7d-8e1711a97015" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.942s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.725153] env[68674]: DEBUG nova.network.neutron [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Successfully updated port: 7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1066.938296] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34331d6-4838-43e1-879b-4423cb5113fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.946887] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bda5e5-3c49-44ad-98a9-2fd08c1198cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.982196] env[68674]: WARNING nova.network.neutron [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] a803f1d7-ea36-4d0a-9a85-9b7a8d27f698 already exists in list: networks containing: ['a803f1d7-ea36-4d0a-9a85-9b7a8d27f698']. ignoring it [ 1066.984724] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7edb740-3ea3-4749-9db8-23bf1e983101 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.995804] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba08f4d-6edc-414d-ae16-076353e2df83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.012496] env[68674]: DEBUG nova.compute.provider_tree [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.085304] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240736, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.157410] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240738, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.164777] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240739, 'name': Rename_Task, 'duration_secs': 0.942041} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.166979] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.167233] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7acd639-5b0a-439a-8fa6-d76143b4359c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.173308] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1067.173308] env[68674]: value = "task-3240740" [ 1067.173308] env[68674]: _type = "Task" [ 1067.173308] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.180531] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240740, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.227615] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.227712] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.227860] env[68674]: DEBUG nova.network.neutron [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.278464] env[68674]: DEBUG nova.network.neutron [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc748df7-150d-4b34-a259-782775725005", "address": "fa:16:3e:92:eb:c8", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc748df7-15", "ovs_interfaceid": "dc748df7-150d-4b34-a259-782775725005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.517191] env[68674]: DEBUG nova.scheduler.client.report [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.520680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "caed484b-6fb0-41f2-a35f-8f85117dcf15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.520887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.583738] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240736, 'name': CreateVM_Task, 'duration_secs': 1.552755} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.583916] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1067.584698] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.584984] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.585284] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1067.585873] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae5d97fe-76d6-49c0-93a9-cb7cdf2d4765 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.590521] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1067.590521] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52830add-e0f2-8349-a163-ab49cc60621c" [ 1067.590521] env[68674]: _type = "Task" [ 1067.590521] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.598744] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52830add-e0f2-8349-a163-ab49cc60621c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.658679] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240738, 'name': ReconfigVM_Task, 'duration_secs': 1.205501} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.658975] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/4214e971-ca72-4c9f-a355-78e5ad8d8219.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.659638] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6c39e0d-e788-4844-8ad9-e71dd7292ec5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.666902] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1067.666902] env[68674]: value = "task-3240741" [ 1067.666902] env[68674]: _type = "Task" [ 1067.666902] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.675921] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240741, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.684239] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240740, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.760811] env[68674]: DEBUG nova.network.neutron [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1067.781029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.781562] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.781762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.783095] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30558d9-f60a-413a-8dde-a1cd776c1987 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.801335] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1067.801584] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.801764] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1067.801987] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.802246] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1067.802412] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1067.802660] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1067.802813] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1067.802998] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1067.803205] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1067.803401] env[68674]: DEBUG nova.virt.hardware [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1067.810076] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Reconfiguring VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1067.812782] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fae8844b-e90b-4b40-a4a5-a01e5bd6d65f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.829903] env[68674]: DEBUG oslo_vmware.api [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1067.829903] env[68674]: value = "task-3240742" [ 1067.829903] env[68674]: _type = "Task" [ 1067.829903] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.838738] env[68674]: DEBUG oslo_vmware.api [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240742, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.922920] env[68674]: DEBUG nova.network.neutron [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updating instance_info_cache with network_info: [{"id": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "address": "fa:16:3e:b5:d6:a6", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7111fb79-ad", "ovs_interfaceid": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.021481] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.024138] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.835s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.025888] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1068.058183] env[68674]: INFO nova.scheduler.client.report [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Deleted allocations for instance f6f5fb73-521a-4c83-93ea-a1eb2af2e142 [ 1068.098056] env[68674]: DEBUG nova.network.neutron [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Port 4fae8d88-2aaa-48bd-b0c4-72bc768efce3 binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1068.098056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.098056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.098056] env[68674]: DEBUG nova.network.neutron [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.105517] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52830add-e0f2-8349-a163-ab49cc60621c, 'name': SearchDatastore_Task, 'duration_secs': 0.010744} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.106398] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.106642] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1068.106874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.107034] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.107212] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1068.108394] env[68674]: DEBUG nova.compute.manager [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-changed-dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.108583] env[68674]: DEBUG nova.compute.manager [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing instance network info cache due to event network-changed-dc748df7-150d-4b34-a259-782775725005. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1068.108789] env[68674]: DEBUG oslo_concurrency.lockutils [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.108930] env[68674]: DEBUG oslo_concurrency.lockutils [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.109104] env[68674]: DEBUG nova.network.neutron [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing network info cache for port dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.110779] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a812a94-139c-4eec-a254-4605c3c26a5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.121414] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1068.121617] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1068.122843] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf6fe673-e770-4bf5-9205-c6f969caaf20 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.129524] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1068.129524] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5287befb-093c-40db-5d6d-f2c2812621b4" [ 1068.129524] env[68674]: _type = "Task" [ 1068.129524] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.138347] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5287befb-093c-40db-5d6d-f2c2812621b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.178603] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240741, 'name': Rename_Task, 'duration_secs': 0.149279} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.182048] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1068.182468] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-351002bc-fe98-48dc-8b24-03ff564363d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.190279] env[68674]: DEBUG oslo_vmware.api [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240740, 'name': PowerOnVM_Task, 'duration_secs': 0.530105} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.191529] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1068.191736] env[68674]: INFO nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Took 10.85 seconds to spawn the instance on the hypervisor. [ 1068.191919] env[68674]: DEBUG nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.192428] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1068.192428] env[68674]: value = "task-3240743" [ 1068.192428] env[68674]: _type = "Task" [ 1068.192428] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.193215] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35599a96-dc83-4620-9334-4b10081084a7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.205747] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240743, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.341567] env[68674]: DEBUG oslo_vmware.api [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240742, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.426329] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.426700] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Instance network_info: |[{"id": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "address": "fa:16:3e:b5:d6:a6", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7111fb79-ad", "ovs_interfaceid": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1068.427150] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:d6:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7111fb79-ad70-4af7-9c47-0e2443a51a32', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1068.434990] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.435102] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1068.435378] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4df42ecd-39c0-4ef6-b967-ec0b0b240998 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.454879] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.454879] env[68674]: value = "task-3240744" [ 1068.454879] env[68674]: _type = "Task" [ 1068.454879] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.462337] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240744, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.552920] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.567200] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d0625b1c-ff75-46df-9ca5-e99561fd97b2 tempest-ServersTestFqdnHostnames-2002620548 tempest-ServersTestFqdnHostnames-2002620548-project-member] Lock "f6f5fb73-521a-4c83-93ea-a1eb2af2e142" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.231s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.639869] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5287befb-093c-40db-5d6d-f2c2812621b4, 'name': SearchDatastore_Task, 'duration_secs': 0.017028} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.640701] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00315ef4-1cf8-4a09-aa77-ac2dcfbc3754 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.646343] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1068.646343] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524f153f-1ee8-b3db-8b1b-3ab411143f01" [ 1068.646343] env[68674]: _type = "Task" [ 1068.646343] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.660228] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524f153f-1ee8-b3db-8b1b-3ab411143f01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.707951] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240743, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.718838] env[68674]: INFO nova.compute.manager [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Took 23.78 seconds to build instance. [ 1068.771383] env[68674]: DEBUG nova.compute.manager [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Received event network-changed-7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.772318] env[68674]: DEBUG nova.compute.manager [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Refreshing instance network info cache due to event network-changed-7111fb79-ad70-4af7-9c47-0e2443a51a32. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1068.772629] env[68674]: DEBUG oslo_concurrency.lockutils [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] Acquiring lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.772830] env[68674]: DEBUG oslo_concurrency.lockutils [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] Acquired lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.773063] env[68674]: DEBUG nova.network.neutron [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Refreshing network info cache for port 7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.841203] env[68674]: DEBUG oslo_vmware.api [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240742, 'name': ReconfigVM_Task, 'duration_secs': 0.892065} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.844930] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.845243] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Reconfigured VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1068.858157] env[68674]: DEBUG nova.network.neutron [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.935094] env[68674]: DEBUG nova.network.neutron [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updated VIF entry in instance network info cache for port dc748df7-150d-4b34-a259-782775725005. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.935625] env[68674]: DEBUG nova.network.neutron [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc748df7-150d-4b34-a259-782775725005", "address": "fa:16:3e:92:eb:c8", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc748df7-15", "ovs_interfaceid": "dc748df7-150d-4b34-a259-782775725005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.965942] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240744, 'name': CreateVM_Task, 'duration_secs': 0.405057} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.966142] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.966820] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.966988] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.967327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.967580] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91054e9d-b780-4e7a-a915-b89f85560e95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.972425] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1068.972425] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52db839c-e835-d13b-5744-bf7d77fc65ab" [ 1068.972425] env[68674]: _type = "Task" [ 1068.972425] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.980449] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52db839c-e835-d13b-5744-bf7d77fc65ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.039784] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Applying migration context for instance 2a7a6269-65a8-402c-b174-a4a46d20a33a as it has an incoming, in-progress migration 0f1a5cb9-1589-4d4f-8424-b56d0d078357. 
Migration status is reverting {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1069.041411] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating resource usage from migration 0f1a5cb9-1589-4d4f-8424-b56d0d078357 [ 1069.061845] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 23891bad-1b63-4237-9243-78954cf67d52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062012] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062163] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f70145c9-4846-42e1-9c1c-de9759097abd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062318] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062467] env[68674]: WARNING nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 182deaf0-c20a-4041-8f41-81786d6b053e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1069.062588] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062704] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062819] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e9bebb3b-78ff-42b1-a350-efd1db5c6eaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.062931] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance bd3ae195-6e01-49d5-9fcf-9520273d9108 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.063057] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration 0f1a5cb9-1589-4d4f-8424-b56d0d078357 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1069.063172] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 2a7a6269-65a8-402c-b174-a4a46d20a33a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.063286] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance cbccde73-b903-47f7-9cbc-f0b376a03435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.063396] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 4214e971-ca72-4c9f-a355-78e5ad8d8219 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.063505] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance ba4bfbb4-a89b-4ab6-964e-792647fd5a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.063614] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance dbbf1313-6e44-45e2-8bf6-83409f06cb4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1069.160215] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524f153f-1ee8-b3db-8b1b-3ab411143f01, 'name': SearchDatastore_Task, 'duration_secs': 0.017236} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.160215] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.160215] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1069.160215] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0023df9-d18a-42b0-b738-b1178a0f39a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.165654] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1069.165654] env[68674]: value = "task-3240745" [ 1069.165654] env[68674]: _type = "Task" [ 1069.165654] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.173713] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.206788] env[68674]: DEBUG oslo_vmware.api [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240743, 'name': PowerOnVM_Task, 'duration_secs': 0.6591} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.207158] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1069.207379] env[68674]: INFO nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 1069.207565] env[68674]: DEBUG nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.208376] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d416f41c-8b7f-4e76-b102-6f7030c4e2ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.221367] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bf7546ae-44c2-4469-94f1-83786bcea71b tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.289s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.350410] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a062f90d-19f1-48fc-a263-aaca9fe0a48e tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-66f4ab32-ef66-4d1d-93b6-775d59ce3c41-dc748df7-150d-4b34-a259-782775725005" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.231s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.361052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.438755] env[68674]: DEBUG oslo_concurrency.lockutils [req-3a231c34-ffad-4bca-9ab1-06261b3c8af4 req-d0068919-af7a-4ff8-a8f9-17427552518c service nova] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.484649] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52db839c-e835-d13b-5744-bf7d77fc65ab, 'name': SearchDatastore_Task, 'duration_secs': 0.020415} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.484958] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.485231] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.485484] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.485635] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.485812] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.486087] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4295cc5b-0b9b-4c23-b221-45ddc98ef724 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.494702] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.494923] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.495839] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c960c518-758a-45cc-ad82-e1d8d5ccdca8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.502206] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1069.502206] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52729d41-71a1-d75b-24c7-b482ac4fd36d" [ 1069.502206] env[68674]: _type = "Task" [ 1069.502206] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.510860] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52729d41-71a1-d75b-24c7-b482ac4fd36d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.543472] env[68674]: DEBUG nova.network.neutron [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updated VIF entry in instance network info cache for port 7111fb79-ad70-4af7-9c47-0e2443a51a32. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.543863] env[68674]: DEBUG nova.network.neutron [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updating instance_info_cache with network_info: [{"id": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "address": "fa:16:3e:b5:d6:a6", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7111fb79-ad", "ovs_interfaceid": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.568634] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to 
start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1069.681054] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240745, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.728963] env[68674]: INFO nova.compute.manager [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Took 20.66 seconds to build instance. [ 1069.847121] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.847399] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.864934] env[68674]: DEBUG nova.compute.manager [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68674) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1069.892261] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "cbccde73-b903-47f7-9cbc-f0b376a03435" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.892590] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.892847] env[68674]: INFO nova.compute.manager [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Rebooting instance [ 1070.011884] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 
tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52729d41-71a1-d75b-24c7-b482ac4fd36d, 'name': SearchDatastore_Task, 'duration_secs': 0.01022} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.012690] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-188e6e1d-2ae8-4db2-8de9-180577305937 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.017567] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1070.017567] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d664fb-3bdd-af89-d523-1370a3dea4e1" [ 1070.017567] env[68674]: _type = "Task" [ 1070.017567] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.026813] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d664fb-3bdd-af89-d523-1370a3dea4e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.047197] env[68674]: DEBUG oslo_concurrency.lockutils [req-919733cd-e847-4171-81bd-1c24d86c0ec4 req-568e0d03-9071-449d-ab89-78fd8bb99e50 service nova] Releasing lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.076022] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance caed484b-6fb0-41f2-a35f-8f85117dcf15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1070.076022] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1070.076022] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1070.182589] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.859562} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.182860] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1070.183064] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1070.183319] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c0cd56a-e7ec-42bd-8231-73dc8bf84a0b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.191687] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1070.191687] env[68674]: value = "task-3240746" [ 1070.191687] env[68674]: _type = "Task" [ 1070.191687] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.204396] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240746, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.231266] env[68674]: DEBUG oslo_concurrency.lockutils [None req-25956230-308c-4368-9a84-0104586822e9 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.169s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.322433] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5780c345-9322-4d7f-bcc5-740d1887c346 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.330592] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee5c4a4-960f-4dd7-8bfa-3dc1ff26119e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.361947] env[68674]: DEBUG nova.compute.utils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1070.367426] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492ac908-0169-4d47-b27c-f3d323da2d6d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.374777] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53474d30-bb76-4334-ad40-23bcbf3037e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.389715] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.420860] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.421153] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquired lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.421407] env[68674]: DEBUG nova.network.neutron [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1070.532136] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 
tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d664fb-3bdd-af89-d523-1370a3dea4e1, 'name': SearchDatastore_Task, 'duration_secs': 0.0139} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.532136] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.532136] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] dbbf1313-6e44-45e2-8bf6-83409f06cb4b/dbbf1313-6e44-45e2-8bf6-83409f06cb4b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.532136] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32c10035-5455-4fd9-91f4-9b11b7abea75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.538403] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1070.538403] env[68674]: value = "task-3240747" [ 1070.538403] env[68674]: _type = "Task" [ 1070.538403] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.551020] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.706832] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106003} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.707236] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.709815] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4570183-08a0-4f8f-9952-71d6ae55e8b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.738587] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.738917] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7287cea-505e-4f6a-a0b9-e207f4ecccdc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.763255] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1070.763255] env[68674]: value = "task-3240748" [ 1070.763255] env[68674]: _type = "Task" [ 1070.763255] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.772183] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240748, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.865062] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.018s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.892916] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.995547] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.053066] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240747, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.165543] env[68674]: DEBUG nova.network.neutron [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Updating instance_info_cache with network_info: [{"id": "67abafc0-ffa5-4032-b312-9314d5c0e03a", "address": "fa:16:3e:b0:54:1d", "network": {"id": "a123b66b-623f-49df-8ff0-67d7dfca6f57", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1805484768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2b4690968b4f54a87770edc8628c75", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67abafc0-ff", "ovs_interfaceid": "67abafc0-ffa5-4032-b312-9314d5c0e03a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.277666] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240748, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.341548] env[68674]: INFO nova.compute.manager [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Rescuing [ 1071.341548] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.341548] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.341548] env[68674]: DEBUG nova.network.neutron [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1071.398223] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1071.398472] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.374s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.398731] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.250s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.398913] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.401275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.125s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.402784] env[68674]: INFO nova.compute.claims [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 
tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.430737] env[68674]: INFO nova.scheduler.client.report [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted allocations for instance 182deaf0-c20a-4041-8f41-81786d6b053e [ 1071.532615] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-66f4ab32-ef66-4d1d-93b6-775d59ce3c41-dc748df7-150d-4b34-a259-782775725005" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.532898] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-66f4ab32-ef66-4d1d-93b6-775d59ce3c41-dc748df7-150d-4b34-a259-782775725005" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.549877] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.847487} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.550158] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] dbbf1313-6e44-45e2-8bf6-83409f06cb4b/dbbf1313-6e44-45e2-8bf6-83409f06cb4b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.550752] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1071.550752] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20ea536b-1f1b-47ef-929c-582a37c04234 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.559890] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1071.559890] env[68674]: value = "task-3240749" [ 1071.559890] env[68674]: _type = "Task" [ 1071.559890] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.568993] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.667921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Releasing lock "refresh_cache-cbccde73-b903-47f7-9cbc-f0b376a03435" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.773167] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240748, 'name': ReconfigVM_Task, 'duration_secs': 0.653613} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.773491] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Reconfigured VM instance instance-0000006a to attach disk [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.774138] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d32e2098-fd1b-4a87-b5a1-a5282a7889a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.783126] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1071.783126] env[68674]: value = "task-3240750" [ 1071.783126] env[68674]: _type = "Task" [ 1071.783126] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.791423] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240750, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.927797] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.928109] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.928322] env[68674]: INFO nova.compute.manager [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Attaching volume 45dbff42-8058-475f-b6ef-88e2d283c59a to /dev/sdb [ 1071.939144] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6df51fcc-c89c-4bbd-b820-f694593bb7f9 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "182deaf0-c20a-4041-8f41-81786d6b053e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.555s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.978079] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf5ab44-f330-42b6-a4a3-040e72069691 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.986198] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f640a4-1747-4ca3-b872-1f824a77fcab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.003452] env[68674]: DEBUG nova.virt.block_device [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating existing volume attachment record: fdfffad6-8d02-4d8b-b731-8ead02939bd1 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1072.035477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.035769] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.036596] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24a3cf8-299b-4a35-8180-ddb1ef62cab1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.060417] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78360455-dbd1-4eb0-9029-497f64ff3f04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.071483] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136253} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.086854] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1072.094289] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Reconfiguring VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1072.095588] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0141357-5ca8-4854-bc5b-d096c6fce55c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.100237] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2da8ad3b-3e70-469c-a451-b8528986ad69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.117032] env[68674]: DEBUG nova.network.neutron [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updating instance_info_cache with network_info: [{"id": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "address": "fa:16:3e:c3:1f:45", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b4166a-1c", "ovs_interfaceid": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.139838] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] dbbf1313-6e44-45e2-8bf6-83409f06cb4b/dbbf1313-6e44-45e2-8bf6-83409f06cb4b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1072.142197] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44a414cc-0c53-4547-b986-744ec4451219 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.157355] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1072.157355] env[68674]: value = "task-3240751" [ 1072.157355] env[68674]: _type = "Task" [ 1072.157355] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.163431] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1072.163431] env[68674]: value = "task-3240753" [ 1072.163431] env[68674]: _type = "Task" [ 1072.163431] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.171812] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.172297] env[68674]: DEBUG nova.compute.manager [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.173413] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58e996b-46d8-4f07-9bbb-239a226f2d04 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.179550] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240753, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.291506] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240750, 'name': Rename_Task, 'duration_secs': 0.170627} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.292049] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.292358] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70fa5442-8787-4938-ab49-fc4e4fe3c4f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.297914] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1072.297914] env[68674]: value = "task-3240756" [ 1072.297914] env[68674]: _type = "Task" [ 1072.297914] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.305720] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.620858] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.676246] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.682031] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240753, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.687018] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6575a9-c15d-4c35-8573-84cb69b0bf1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.693862] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df36201-0bd1-4aa3-8e19-5f9cbbd2bd1f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.734546] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626ef779-9785-41fa-8b41-e6072579a7de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.743330] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7321958c-0e17-4c81-aefe-731056b2f143 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.757902] env[68674]: DEBUG nova.compute.provider_tree [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.808634] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240756, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.178256] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.185513] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240753, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.191613] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a542ce64-6c39-4037-bf73-0b5956f7d8ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.199917] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Doing hard reboot of VM {{(pid=68674) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1073.200205] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-9c95dfad-1131-486e-960e-173424be2e73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.206670] env[68674]: DEBUG oslo_vmware.api [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1073.206670] env[68674]: value = "task-3240757" [ 1073.206670] env[68674]: _type = "Task" [ 1073.206670] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.217710] env[68674]: DEBUG oslo_vmware.api [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240757, 'name': ResetVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.262760] env[68674]: DEBUG nova.scheduler.client.report [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.297819] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.298209] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.319229] env[68674]: DEBUG oslo_vmware.api [None 
req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240756, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.673727] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.684569] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240753, 'name': ReconfigVM_Task, 'duration_secs': 1.152581} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.688070] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Reconfigured VM instance instance-0000006b to attach disk [datastore2] dbbf1313-6e44-45e2-8bf6-83409f06cb4b/dbbf1313-6e44-45e2-8bf6-83409f06cb4b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.688070] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09600033-dbd9-4647-a6e1-f9e8081b0107 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.697538] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1073.697538] env[68674]: value = "task-3240758" [ 1073.697538] env[68674]: _type = "Task" [ 1073.697538] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.705965] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240758, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.716059] env[68674]: DEBUG oslo_vmware.api [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240757, 'name': ResetVM_Task, 'duration_secs': 0.105848} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.716928] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Did hard reboot of VM {{(pid=68674) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1073.717267] env[68674]: DEBUG nova.compute.manager [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.719236] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d9ead8-2e48-4623-b4c4-2a936a563b93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.769264] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.769765] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1073.772965] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.220s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.774459] env[68674]: INFO nova.compute.claims [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.802142] env[68674]: INFO nova.compute.manager [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Detaching volume 09b1b6e3-ad70-4884-a142-77859302b0e3 [ 1073.814373] env[68674]: DEBUG oslo_vmware.api [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240756, 'name': PowerOnVM_Task, 'duration_secs': 1.122302} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.817614] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.820031] env[68674]: INFO nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Took 11.67 seconds to spawn the instance on the hypervisor. [ 1073.820031] env[68674]: DEBUG nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.820031] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629f4983-9a35-4b97-a712-f176a3ec210a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.864092] env[68674]: INFO nova.virt.block_device [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Attempting to driver detach volume 09b1b6e3-ad70-4884-a142-77859302b0e3 from mountpoint /dev/sdb [ 1073.864092] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1073.864092] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647637', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'name': 'volume-09b1b6e3-ad70-4884-a142-77859302b0e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f70145c9-4846-42e1-9c1c-de9759097abd', 'attached_at': '', 'detached_at': '', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'serial': '09b1b6e3-ad70-4884-a142-77859302b0e3'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1073.865449] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28041cd-c914-499d-8a4c-9f7afd0c7362 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.888418] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7586ade-4f9c-439f-91d3-58f33becf501 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.896071] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5975b6a5-f3ce-47ac-846d-0b0dc0e9df13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.918055] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c0e499-e53e-478b-86b9-70f8b7dc51f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.938456] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] The volume has not been displaced from its original location: [datastore1] volume-09b1b6e3-ad70-4884-a142-77859302b0e3/volume-09b1b6e3-ad70-4884-a142-77859302b0e3.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1073.942898] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1073.943320] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d46fb7a-7527-4ab0-b1e5-a6dfa418afe5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.961101] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1073.961101] env[68674]: value = "task-3240759" [ 1073.961101] env[68674]: _type = "Task" [ 1073.961101] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.970261] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240759, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.171632] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.179776] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.180102] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc5fdb7b-b422-4f2f-b038-3d287c38e4b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.186831] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1074.186831] env[68674]: value = "task-3240760" [ 1074.186831] env[68674]: _type = "Task" [ 1074.186831] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.194699] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240760, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.205286] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240758, 'name': Rename_Task, 'duration_secs': 0.270224} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.205634] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1074.205883] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91ecb5e6-6808-4ef8-b5ce-b944742b4555 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.211850] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1074.211850] env[68674]: value = "task-3240761" [ 1074.211850] env[68674]: _type = "Task" [ 1074.211850] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.219966] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240761, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.233656] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e169a853-4098-4f6c-8754-7c2e3e4ac9e3 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.341s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.279986] env[68674]: DEBUG nova.compute.utils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1074.283381] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1074.283613] env[68674]: DEBUG nova.network.neutron [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1074.337205] env[68674]: DEBUG nova.policy [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28e9b76e01f463bbb375cbd9c51684f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81afe76c94de4e94b53f15af0ef95e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1074.342217] env[68674]: INFO nova.compute.manager [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Took 23.39 seconds to build instance. [ 1074.472342] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240759, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.671957] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.706098] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240760, 'name': PowerOffVM_Task, 'duration_secs': 0.214572} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.706098] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.706548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9104e78d-38af-4629-a2ae-ee6b7ae2f63d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.736548] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02990df-b7f4-457b-8554-842144e60efb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.743431] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.743612] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.744951] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240761, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.784710] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.785322] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1074.787891] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1659429e-d934-4416-9698-27e9ff2e1cb6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.800288] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1074.800288] env[68674]: value = "task-3240763" [ 1074.800288] env[68674]: _type = "Task" [ 1074.800288] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.810444] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1074.810673] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.811022] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.811224] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.811360] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.817450] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb3f0a96-395a-4975-a5a8-fcb7c440f83e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.826398] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.826398] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 
tempest-ServerRescueTestJSON-24621824-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.826398] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b05bc96-4095-4bd6-b8fd-07be6a99c41f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.837880] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1074.837880] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e8e9e2-ae86-53b0-d10d-7d0f56a808f9" [ 1074.837880] env[68674]: _type = "Task" [ 1074.837880] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.846241] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dadde2af-c83c-41dc-b85a-b5ebf3c3d49a tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.899s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.846942] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e8e9e2-ae86-53b0-d10d-7d0f56a808f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.858707] env[68674]: DEBUG nova.network.neutron [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Successfully created port: 9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1074.971228] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240759, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.090013] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee507ef1-6180-4844-a04c-84370d154b81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.099096] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42676507-13c9-4421-9ca8-970834665d36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.133071] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b50f90-fc8b-4692-af92-2f3953212a93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.140225] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cc6d0f-b924-4555-b463-bad13e2948cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.158454] env[68674]: DEBUG nova.compute.provider_tree [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.170830] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.188353] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "cbccde73-b903-47f7-9cbc-f0b376a03435" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.188708] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.188981] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "cbccde73-b903-47f7-9cbc-f0b376a03435-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.189534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.189534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.192883] env[68674]: INFO nova.compute.manager [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Terminating instance [ 1075.224956] env[68674]: DEBUG oslo_vmware.api [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240761, 'name': PowerOnVM_Task, 'duration_secs': 0.680676} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.225495] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.225617] env[68674]: INFO nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Took 8.91 seconds to spawn the instance on the hypervisor. [ 1075.225856] env[68674]: DEBUG nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.227257] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5a0a1c-8d6c-4bca-9e3f-d2f4100a4d8c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.245791] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1075.348346] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e8e9e2-ae86-53b0-d10d-7d0f56a808f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010015} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.349256] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59580c69-00d7-4985-9a4d-4818b2610b96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.354852] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1075.354852] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52629b01-62c6-9326-119c-73f9ec30942b" [ 1075.354852] env[68674]: _type = "Task" [ 1075.354852] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.363899] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52629b01-62c6-9326-119c-73f9ec30942b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.446112] env[68674]: DEBUG nova.compute.manager [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1075.446310] env[68674]: DEBUG nova.compute.manager [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing instance network info cache due to event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1075.446938] env[68674]: DEBUG oslo_concurrency.lockutils [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.446938] env[68674]: DEBUG oslo_concurrency.lockutils [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.446938] env[68674]: DEBUG nova.network.neutron [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1075.471753] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240759, 'name': ReconfigVM_Task, 'duration_secs': 1.367106} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.472823] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1075.476846] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62f38316-5b17-4921-a91e-cbdb9ea4d0d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.493554] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1075.493554] env[68674]: value = "task-3240764" [ 1075.493554] env[68674]: _type = "Task" [ 1075.493554] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.501267] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240764, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.662293] env[68674]: DEBUG nova.scheduler.client.report [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.677555] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.700506] env[68674]: DEBUG nova.compute.manager [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1075.700706] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.701769] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276c2da4-39e1-4a3e-a018-4c1f20de4247 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.711195] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.711441] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-120a54a1-5777-4ebb-8b79-292114da93f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.717668] env[68674]: DEBUG oslo_vmware.api [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1075.717668] env[68674]: value = "task-3240765" [ 1075.717668] env[68674]: _type = "Task" [ 1075.717668] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.725197] env[68674]: DEBUG oslo_vmware.api [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240765, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.745578] env[68674]: INFO nova.compute.manager [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Took 20.56 seconds to build instance. [ 1075.767956] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.800291] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1075.830279] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1075.830570] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.830739] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1075.830898] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.831063] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1075.831214] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1075.831438] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1075.831607] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1075.831775] env[68674]: DEBUG nova.virt.hardware [None 
req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1075.831946] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1075.832127] env[68674]: DEBUG nova.virt.hardware [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1075.833027] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e734eb-f5ee-4ede-91fd-39b8a6f313b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.841547] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7b220e-2098-477d-a282-284ebcf97447 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.867281] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52629b01-62c6-9326-119c-73f9ec30942b, 'name': SearchDatastore_Task, 'duration_secs': 0.009139} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.867549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.867808] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
{{(pid=68674) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1075.868095] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d83ce511-262e-472f-80e9-8a609b8c5a89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.875258] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1075.875258] env[68674]: value = "task-3240766" [ 1075.875258] env[68674]: _type = "Task" [ 1075.875258] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.883145] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240766, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.009748] env[68674]: DEBUG oslo_vmware.api [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240764, 'name': ReconfigVM_Task, 'duration_secs': 0.157111} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.010308] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647637', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'name': 'volume-09b1b6e3-ad70-4884-a142-77859302b0e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f70145c9-4846-42e1-9c1c-de9759097abd', 'attached_at': '', 'detached_at': '', 'volume_id': '09b1b6e3-ad70-4884-a142-77859302b0e3', 'serial': '09b1b6e3-ad70-4884-a142-77859302b0e3'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1076.173030] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.173030] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1076.181471] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 5.185s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.194243] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.235145] env[68674]: DEBUG oslo_vmware.api [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240765, 'name': PowerOffVM_Task, 'duration_secs': 0.206549} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.236514] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.236871] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.239055] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c53f48c-4628-4a6b-a70a-07791176ef07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.248675] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ae39eb60-d8b8-4c90-b47c-7c81686cb537 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.092s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.282329] env[68674]: DEBUG nova.network.neutron [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updated VIF entry in instance network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1076.282909] env[68674]: DEBUG nova.network.neutron [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309bd2b-c0", "ovs_interfaceid": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.317088] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.317088] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.317088] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Deleting the datastore file [datastore1] cbccde73-b903-47f7-9cbc-f0b376a03435 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.317088] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63f7a704-77ed-4ac9-b14d-d39a723fe48e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.328212] env[68674]: DEBUG oslo_vmware.api [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for the task: (returnval){ [ 1076.328212] env[68674]: value = "task-3240768" [ 1076.328212] env[68674]: _type = "Task" [ 1076.328212] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.340529] env[68674]: DEBUG oslo_vmware.api [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240768, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.391053] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240766, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.555198] env[68674]: DEBUG nova.objects.instance [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'flavor' on Instance uuid f70145c9-4846-42e1-9c1c-de9759097abd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.561603] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1076.561903] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647679', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'name': 'volume-45dbff42-8058-475f-b6ef-88e2d283c59a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '142e8ede-90e2-47cf-a1b1-8c4fd59eed0a', 'attached_at': '', 'detached_at': '', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'serial': '45dbff42-8058-475f-b6ef-88e2d283c59a'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1076.562827] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13740c6b-5479-46b8-afa8-7608fbf9b925 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.583083] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc89f4f6-f8c9-42f7-be3a-0268ba217f5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.610820] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] volume-45dbff42-8058-475f-b6ef-88e2d283c59a/volume-45dbff42-8058-475f-b6ef-88e2d283c59a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.611483] env[68674]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71428161-ce85-4698-ae31-857cb2a58617 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.630816] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1076.630816] env[68674]: value = "task-3240769" [ 1076.630816] env[68674]: _type = "Task" [ 1076.630816] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.640514] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240769, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.678959] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.687019] env[68674]: DEBUG nova.compute.utils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1076.687019] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1076.687019] env[68674]: DEBUG nova.network.neutron [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1076.689256] env[68674]: DEBUG nova.objects.instance [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'migration_context' on Instance uuid 2a7a6269-65a8-402c-b174-a4a46d20a33a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.717611] env[68674]: DEBUG nova.network.neutron [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Successfully updated port: 9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1076.754541] env[68674]: DEBUG nova.policy [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1076.786818] env[68674]: DEBUG oslo_concurrency.lockutils [req-057e3863-60f1-446b-9a7d-0e46b9fd29ab req-ba6d847e-7a48-4b85-b72a-249f538764ea service nova] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.838160] env[68674]: DEBUG oslo_vmware.api [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Task: {'id': task-3240768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296112} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.838455] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.838664] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.838853] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.839042] env[68674]: INFO nova.compute.manager [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1076.839291] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1076.839477] env[68674]: DEBUG nova.compute.manager [-] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1076.839723] env[68674]: DEBUG nova.network.neutron [-] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.886422] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515842} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.886765] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk. 
[ 1076.887747] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61363883-f35d-4ab6-8bf2-bb9f73c5d07b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.921351] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.922427] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5843933d-83fb-4cea-bbf1-55e7c36e06c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.941488] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1076.941488] env[68674]: value = "task-3240770" [ 1076.941488] env[68674]: _type = "Task" [ 1076.941488] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.949931] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240770, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.970304] env[68674]: DEBUG nova.compute.manager [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Received event network-changed-7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1076.970304] env[68674]: DEBUG nova.compute.manager [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Refreshing instance network info cache due to event network-changed-7111fb79-ad70-4af7-9c47-0e2443a51a32. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1076.970304] env[68674]: DEBUG oslo_concurrency.lockutils [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] Acquiring lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.970304] env[68674]: DEBUG oslo_concurrency.lockutils [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] Acquired lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.970304] env[68674]: DEBUG nova.network.neutron [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Refreshing network info cache for port 7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1077.140905] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.141701] env[68674]: DEBUG nova.network.neutron [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Successfully created port: 01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.180570] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.191297] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1077.220321] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.220479] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.220809] env[68674]: DEBUG nova.network.neutron [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1077.455199] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.476937] env[68674]: DEBUG nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Received event network-vif-plugged-9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1077.477178] env[68674]: DEBUG oslo_concurrency.lockutils [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] Acquiring lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.477396] env[68674]: DEBUG oslo_concurrency.lockutils [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.477567] env[68674]: DEBUG oslo_concurrency.lockutils [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.477733] env[68674]: DEBUG nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] No waiting events found dispatching network-vif-plugged-9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1077.477896] env[68674]: WARNING 
nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Received unexpected event network-vif-plugged-9fd41ca9-247f-4f5f-8749-60983c1e212a for instance with vm_state building and task_state spawning. [ 1077.478068] env[68674]: DEBUG nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Received event network-changed-9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1077.478225] env[68674]: DEBUG nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Refreshing instance network info cache due to event network-changed-9fd41ca9-247f-4f5f-8749-60983c1e212a. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1077.478405] env[68674]: DEBUG oslo_concurrency.lockutils [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] Acquiring lock "refresh_cache-95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.483200] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09057bb6-6b8d-446c-98de-7aca62ccf397 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.490567] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6164ca12-4edd-463e-8920-d599e9a7d566 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.529509] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bfd61b-e7fc-4105-bd61-cdd11e8fbc36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.537250] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bae75b7-c5f0-4f6e-b80f-26caf49c7a9c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.554517] env[68674]: DEBUG nova.network.neutron [-] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.557629] env[68674]: DEBUG nova.compute.provider_tree [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.561834] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60a44703-31c9-45d8-b2ca-e5d2b4aab0f8 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.263s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.641425] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.680984] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.714808] env[68674]: DEBUG nova.network.neutron [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updated VIF entry in instance network info cache for port 7111fb79-ad70-4af7-9c47-0e2443a51a32. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1077.715162] env[68674]: DEBUG nova.network.neutron [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updating instance_info_cache with network_info: [{"id": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "address": "fa:16:3e:b5:d6:a6", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7111fb79-ad", "ovs_interfaceid": "7111fb79-ad70-4af7-9c47-0e2443a51a32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.752438] env[68674]: DEBUG nova.network.neutron [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1077.895193] env[68674]: DEBUG nova.network.neutron [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Updating instance_info_cache with network_info: [{"id": "9fd41ca9-247f-4f5f-8749-60983c1e212a", "address": "fa:16:3e:c2:2a:96", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fd41ca9-24", "ovs_interfaceid": "9fd41ca9-247f-4f5f-8749-60983c1e212a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.953566] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240770, 'name': ReconfigVM_Task, 'duration_secs': 0.909787} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.953858] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219/b84d9354-ef6b-46ca-9dae-6549fa89bbea-rescue.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.954709] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0959ba-ed21-4029-91b8-d51baff09e62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.979884] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cdf67a0-8cc8-47de-ad60-ec5839b0c722 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.995407] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1077.995407] env[68674]: value = "task-3240771" [ 1077.995407] env[68674]: _type = "Task" [ 1077.995407] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.003052] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240771, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.058965] env[68674]: INFO nova.compute.manager [-] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Took 1.22 seconds to deallocate network for instance. [ 1078.065865] env[68674]: DEBUG nova.scheduler.client.report [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.142581] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240769, 'name': ReconfigVM_Task, 'duration_secs': 1.21996} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.142887] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Reconfigured VM instance instance-00000063 to attach disk [datastore2] volume-45dbff42-8058-475f-b6ef-88e2d283c59a/volume-45dbff42-8058-475f-b6ef-88e2d283c59a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1078.149854] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-434bfc31-7093-480d-869b-16aec4f0a1f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.170041] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1078.170041] env[68674]: value = "task-3240773" [ 1078.170041] env[68674]: _type = "Task" [ 1078.170041] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.183307] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240773, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.187485] env[68674]: DEBUG oslo_vmware.api [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240751, 'name': ReconfigVM_Task, 'duration_secs': 5.79749} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.187874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.187996] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Reconfigured VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1078.203682] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.217854] env[68674]: DEBUG oslo_concurrency.lockutils [req-e2d2251c-c8f5-4612-924e-c4c566c8266f req-30e05fc7-2129-4141-8bcc-7deef8123b70 service nova] Releasing lock "refresh_cache-dbbf1313-6e44-45e2-8bf6-83409f06cb4b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.226916] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.227184] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.227352] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.227603] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.227769] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.227921] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.228148] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.228314] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.228505] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.228673] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.228848] env[68674]: DEBUG nova.virt.hardware [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.230148] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03207b21-d6e4-4af9-9dfa-4ae53ef2999e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.242808] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae30164-fa79-4b88-9793-6a5911b83d3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.398134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.398491] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Instance network_info: |[{"id": "9fd41ca9-247f-4f5f-8749-60983c1e212a", "address": "fa:16:3e:c2:2a:96", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fd41ca9-24", "ovs_interfaceid": "9fd41ca9-247f-4f5f-8749-60983c1e212a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1078.398814] env[68674]: DEBUG oslo_concurrency.lockutils [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] Acquired lock "refresh_cache-95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.398997] env[68674]: DEBUG nova.network.neutron [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Refreshing network info cache for port 9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1078.400242] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:2a:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fd41ca9-247f-4f5f-8749-60983c1e212a', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1078.411735] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1078.412213] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1078.413029] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f36fdde9-fd9e-4628-bf12-19d6af2161c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.432469] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1078.432469] env[68674]: value = "task-3240774" [ 1078.432469] env[68674]: _type = "Task" [ 1078.432469] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.440460] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240774, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.505421] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240771, 'name': ReconfigVM_Task, 'duration_secs': 0.26104} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.505421] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1078.505643] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05b459a3-2c08-4d7a-92d8-084368474b22 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.512833] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1078.512833] env[68674]: value = "task-3240775" [ 1078.512833] env[68674]: _type = "Task" [ 1078.512833] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.521244] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240775, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.572481] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.665275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.665942] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.665942] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "f70145c9-4846-42e1-9c1c-de9759097abd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.666166] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.666234] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.668573] env[68674]: INFO nova.compute.manager [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Terminating instance [ 1078.680946] env[68674]: DEBUG oslo_vmware.api [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240773, 'name': ReconfigVM_Task, 'duration_secs': 0.185586} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.681797] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647679', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'name': 'volume-45dbff42-8058-475f-b6ef-88e2d283c59a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '142e8ede-90e2-47cf-a1b1-8c4fd59eed0a', 'attached_at': '', 'detached_at': '', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'serial': '45dbff42-8058-475f-b6ef-88e2d283c59a'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1078.777929] env[68674]: DEBUG nova.network.neutron [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Successfully updated port: 01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1078.943728] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240774, 'name': CreateVM_Task, 'duration_secs': 0.429194} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.944223] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.945205] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.945205] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.945205] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1078.945499] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9caa5c59-ff1e-4f03-827e-a6bde1142791 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.950899] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1078.950899] 
env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524f8a87-1a4b-1741-6518-21a13e6af76f" [ 1078.950899] env[68674]: _type = "Task" [ 1078.950899] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.960511] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524f8a87-1a4b-1741-6518-21a13e6af76f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.011515] env[68674]: DEBUG nova.compute.manager [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Received event network-vif-plugged-01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.011515] env[68674]: DEBUG oslo_concurrency.lockutils [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] Acquiring lock "caed484b-6fb0-41f2-a35f-8f85117dcf15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.011515] env[68674]: DEBUG oslo_concurrency.lockutils [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.011515] env[68674]: DEBUG oslo_concurrency.lockutils [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.011515] env[68674]: DEBUG nova.compute.manager [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] No waiting events found dispatching network-vif-plugged-01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1079.011515] env[68674]: WARNING nova.compute.manager [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Received unexpected event network-vif-plugged-01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 for instance with vm_state building and task_state spawning. 
[ 1079.011515] env[68674]: DEBUG nova.compute.manager [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Received event network-changed-01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.011515] env[68674]: DEBUG nova.compute.manager [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Refreshing instance network info cache due to event network-changed-01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1079.011515] env[68674]: DEBUG oslo_concurrency.lockutils [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] Acquiring lock "refresh_cache-caed484b-6fb0-41f2-a35f-8f85117dcf15" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.011515] env[68674]: DEBUG oslo_concurrency.lockutils [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] Acquired lock "refresh_cache-caed484b-6fb0-41f2-a35f-8f85117dcf15" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.011515] env[68674]: DEBUG nova.network.neutron [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Refreshing network info cache for port 01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1079.025872] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240775, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.079556] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.899s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.085492] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.318s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.086874] env[68674]: INFO nova.compute.claims [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.154939] env[68674]: DEBUG nova.network.neutron [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Updated VIF entry in instance network info cache for port 9fd41ca9-247f-4f5f-8749-60983c1e212a. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.155360] env[68674]: DEBUG nova.network.neutron [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Updating instance_info_cache with network_info: [{"id": "9fd41ca9-247f-4f5f-8749-60983c1e212a", "address": "fa:16:3e:c2:2a:96", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fd41ca9-24", "ovs_interfaceid": "9fd41ca9-247f-4f5f-8749-60983c1e212a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.176552] env[68674]: DEBUG nova.compute.manager [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1079.176766] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1079.177726] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df760f8d-61c3-4ff9-85f2-5267cdabc3c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.187296] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.187983] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a2a9fb2-7ea5-489b-bfcf-61e597f7c405 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.194060] env[68674]: DEBUG oslo_vmware.api [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1079.194060] env[68674]: value = "task-3240776" [ 1079.194060] env[68674]: _type = "Task" [ 1079.194060] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.201768] env[68674]: DEBUG oslo_vmware.api [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.281175] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "refresh_cache-caed484b-6fb0-41f2-a35f-8f85117dcf15" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.463954] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524f8a87-1a4b-1741-6518-21a13e6af76f, 'name': SearchDatastore_Task, 'duration_secs': 0.011269} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.464495] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.464917] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.465404] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.465689] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.465920] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.466225] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96bd677e-70e8-490f-8849-79cfc842cab5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.475415] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.475603] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.476379] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af4e0ec6-bfe4-4a9b-91c3-8472c0be9c6f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.482628] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1079.482628] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524138d9-501a-fb42-bb8c-fdc8e4ad393f" [ 1079.482628] env[68674]: _type = "Task" [ 1079.482628] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.490132] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524138d9-501a-fb42-bb8c-fdc8e4ad393f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.522867] env[68674]: DEBUG oslo_vmware.api [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240775, 'name': PowerOnVM_Task, 'duration_secs': 0.817844} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.523345] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.525985] env[68674]: DEBUG nova.compute.manager [None req-c5ce4346-4968-4b55-99ea-23ca5d7885ac tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.526811] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60466894-2e9a-4f6e-9fea-79cdc8ffe819 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.543421] env[68674]: DEBUG nova.network.neutron [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1079.601951] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.605146] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.605146] env[68674]: DEBUG nova.network.neutron [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.648838] env[68674]: DEBUG nova.network.neutron [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.658156] env[68674]: DEBUG oslo_concurrency.lockutils [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] Releasing lock "refresh_cache-95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.658499] env[68674]: DEBUG nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Received event network-vif-deleted-67abafc0-ffa5-4032-b312-9314d5c0e03a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.658780] env[68674]: INFO nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Neutron deleted interface 67abafc0-ffa5-4032-b312-9314d5c0e03a; detaching it from the instance and deleting it from the info cache [ 1079.659081] env[68674]: DEBUG nova.network.neutron [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.706120] env[68674]: DEBUG oslo_vmware.api [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240776, 'name': PowerOffVM_Task, 'duration_secs': 0.214829} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.706419] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1079.706907] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1079.706907] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-902d6305-b537-436e-8650-ef26b30eb966 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.722015] env[68674]: DEBUG nova.objects.instance [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.770325] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1079.770658] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1079.770842] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleting the datastore file [datastore2] f70145c9-4846-42e1-9c1c-de9759097abd {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.771109] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b54626c2-f002-4881-b429-321e817134aa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.777316] env[68674]: DEBUG oslo_vmware.api [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1079.777316] env[68674]: value = "task-3240778" [ 1079.777316] env[68674]: _type = "Task" [ 1079.777316] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.784863] env[68674]: DEBUG oslo_vmware.api [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240778, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.993201] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524138d9-501a-fb42-bb8c-fdc8e4ad393f, 'name': SearchDatastore_Task, 'duration_secs': 0.029854} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.993997] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bc05d7c-6885-4542-8d78-4af3c31fda15 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.999224] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1079.999224] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5fe26-7549-1acb-1528-ed0bdf6c60ec" [ 1079.999224] env[68674]: _type = "Task" [ 1079.999224] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.006833] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5fe26-7549-1acb-1528-ed0bdf6c60ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.151787] env[68674]: DEBUG oslo_concurrency.lockutils [req-02fa6fcb-d9f7-4db7-b26d-264ec0dac2f7 req-c3b25c86-f59e-4699-87f9-13de4f30a738 service nova] Releasing lock "refresh_cache-caed484b-6fb0-41f2-a35f-8f85117dcf15" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.152225] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-caed484b-6fb0-41f2-a35f-8f85117dcf15" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.152382] env[68674]: DEBUG nova.network.neutron [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.163187] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d090e969-0c01-47c2-a2b1-8cf1d824672a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.176988] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f044f25f-f0f8-4acc-a789-79821b8243f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.218499] env[68674]: DEBUG nova.compute.manager [req-0147662d-f31b-45bf-a2c2-d47c5ef287ce req-f0b3d257-a4eb-4e1e-9bd2-b61344343144 service nova] 
[instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Detach interface failed, port_id=67abafc0-ffa5-4032-b312-9314d5c0e03a, reason: Instance cbccde73-b903-47f7-9cbc-f0b376a03435 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1080.229432] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a3fb9949-593a-4066-beb4-9607af99ea63 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.300s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.292455] env[68674]: DEBUG oslo_vmware.api [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240778, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138497} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.293383] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.293383] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1080.293383] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1080.294301] env[68674]: INFO nova.compute.manager [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1080.294301] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.294301] env[68674]: DEBUG nova.compute.manager [-] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1080.294301] env[68674]: DEBUG nova.network.neutron [-] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1080.401104] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.401362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.401541] env[68674]: DEBUG nova.compute.manager [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.402462] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47748a34-81c7-4b8e-83de-1eb54d9558ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.409253] env[68674]: DEBUG nova.compute.manager [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1080.409805] env[68674]: DEBUG nova.objects.instance [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.414947] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b53886-cb35-4551-9206-e2ca635d1459 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.422425] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f78555-c3ca-415c-a9eb-3458491a4fdd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.455587] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1ba67dc1-3c59-4a84-9161-4204a64a2d38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.463537] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de740dbb-4e48-4599-8ee9-6d09a66ffc80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.477555] env[68674]: DEBUG nova.compute.provider_tree [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.511020] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5fe26-7549-1acb-1528-ed0bdf6c60ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009524} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.511020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.511203] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d/95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1080.514373] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34cd0e01-4fd6-4738-8302-01d27d0ebd1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.518617] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1080.518617] env[68674]: value = "task-3240779" [ 1080.518617] env[68674]: _type = "Task" [ 1080.518617] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.528124] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240779, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.643098] env[68674]: INFO nova.compute.manager [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Swapping old allocation on dict_keys(['ade3f042-7427-494b-9654-0b65e074850c']) held by migration 0f1a5cb9-1589-4d4f-8424-b56d0d078357 for instance [ 1080.675731] env[68674]: DEBUG nova.scheduler.client.report [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Overwriting current allocation {'allocations': {'ade3f042-7427-494b-9654-0b65e074850c': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 153}}, 'project_id': '61ea6bfeb37d470a970e9c98e4827ade', 'user_id': '3ce343abf0f14bb5b5141c50113ccf6b', 'consumer_generation': 1} on consumer 2a7a6269-65a8-402c-b174-a4a46d20a33a {{(pid=68674) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1080.755206] env[68674]: DEBUG nova.network.neutron [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.850226] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.850226] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.850226] env[68674]: DEBUG nova.network.neutron [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.893048] env[68674]: INFO nova.network.neutron [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Port dc748df7-150d-4b34-a259-782775725005 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
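The recurring "Invoking <something>_Task" / "Waiting for the task: (returnval){...}" / "progress is N%" / "completed successfully" sequences in this log are oslo.vmware's task-polling pattern. A minimal sketch of that pattern, assuming an already-created VMwareAPISession (session) and a VM managed-object reference (vm_ref) obtained elsewhere; both are placeholders, not values taken from this log, and this is not the exact nova code path:

    from oslo_vmware import api as vmware_api

    def power_off_vm(session: vmware_api.VMwareAPISession, vm_ref):
        """Illustrative only: issue a vCenter task and block until it finishes."""
        # invoke_api() sends the SOAP call -- the "Invoking
        # VirtualMachine.PowerOffVM_Task" lines above -- and returns a Task moref.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task, producing the "Waiting for the task"
        # and "progress is N%" lines, and raises if vCenter reports an error.
        session.wait_for_task(task_ref)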
[ 1080.893048] env[68674]: DEBUG nova.network.neutron [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.983036] env[68674]: DEBUG nova.scheduler.client.report [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.029830] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240779, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.079765] env[68674]: DEBUG nova.compute.manager [req-7303d35d-c768-4a73-9a3e-e6f16b83cb9d req-109fbd6d-b950-4698-aeb1-4c210b601ef7 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Received event network-vif-deleted-7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.079765] env[68674]: INFO nova.compute.manager [req-7303d35d-c768-4a73-9a3e-e6f16b83cb9d req-109fbd6d-b950-4698-aeb1-4c210b601ef7 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Neutron deleted interface 7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7; detaching it from the instance and deleting it from the info cache [ 1081.079765] env[68674]: DEBUG nova.network.neutron [req-7303d35d-c768-4a73-9a3e-e6f16b83cb9d req-109fbd6d-b950-4698-aeb1-4c210b601ef7 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.132923] env[68674]: DEBUG nova.network.neutron [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Updating instance_info_cache with network_info: [{"id": "01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17", "address": "fa:16:3e:d4:a2:fc", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01a0c28d-5f", "ovs_interfaceid": "01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.307381] env[68674]: INFO nova.compute.manager [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Unrescuing [ 1081.307381] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.307381] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquired lock 
"refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.307381] env[68674]: DEBUG nova.network.neutron [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.364443] env[68674]: DEBUG nova.compute.manager [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-changed-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.364950] env[68674]: DEBUG nova.compute.manager [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing instance network info cache due to event network-changed-270836ed-f229-45ed-b23b-58f26fa997be. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1081.364950] env[68674]: DEBUG oslo_concurrency.lockutils [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] Acquiring lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.395778] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.403113] env[68674]: DEBUG oslo_concurrency.lockutils [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] Acquired lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.403414] env[68674]: DEBUG nova.network.neutron [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Refreshing network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.424535] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.424882] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62562d95-e449-4229-8964-16699545585e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.434771] env[68674]: DEBUG oslo_vmware.api [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1081.434771] env[68674]: value 
= "task-3240781" [ 1081.434771] env[68674]: _type = "Task" [ 1081.434771] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.449024] env[68674]: DEBUG oslo_vmware.api [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.476740] env[68674]: DEBUG nova.network.neutron [-] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.488506] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.489623] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1081.494694] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.923s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.494957] env[68674]: DEBUG nova.objects.instance [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lazy-loading 'resources' on Instance uuid cbccde73-b903-47f7-9cbc-f0b376a03435 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.531987] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517236} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.531987] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d/95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1081.531987] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1081.531987] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf52fa19-fced-4f86-9bf8-9506a2e3168f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.542037] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1081.542037] env[68674]: value = "task-3240782" [ 1081.542037] env[68674]: _type = "Task" [ 1081.542037] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.553289] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.584047] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ca5c861-6d74-4401-bfdf-8c566a1270d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.595337] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2556350e-545a-4184-9a9f-0f006f993f07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.632409] env[68674]: DEBUG nova.compute.manager [req-7303d35d-c768-4a73-9a3e-e6f16b83cb9d req-109fbd6d-b950-4698-aeb1-4c210b601ef7 service nova] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Detach interface failed, port_id=7ea7b81b-2dc1-4015-9bb0-53ebf3f3c2b7, reason: Instance f70145c9-4846-42e1-9c1c-de9759097abd could not be found. 
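The 'Acquiring lock "..." by "..."', 'acquired ... :: waited N s' and '"released" ... :: held N s' entries throughout this section come from oslo.concurrency's lockutils wrapper, which nova uses to serialize per-instance operations (the lock name is the instance UUID) and to report how long each critical section waited and ran. A minimal sketch of the same pattern; the function names and body below are hypothetical, not the actual ComputeManager code:

    from oslo_concurrency import lockutils

    def stop_instance(instance_uuid):
        # The decorator emits the "Acquiring lock ... by ...", "acquired ::
        # waited N s" and "released :: held N s" debug lines seen in this log.
        @lockutils.synchronized(instance_uuid)
        def do_stop_instance():
            # ... power off the VM and update the instance record here ...
            pass

        do_stop_instance()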
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1081.635198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-caed484b-6fb0-41f2-a35f-8f85117dcf15" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.635495] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Instance network_info: |[{"id": "01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17", "address": "fa:16:3e:d4:a2:fc", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01a0c28d-5f", "ovs_interfaceid": "01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1081.635873] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:a2:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.643345] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.643859] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.644096] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fb96bf8-d853-44c3-8c2a-b38dd41c4f0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.659422] env[68674]: DEBUG nova.network.neutron [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [{"id": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "address": "fa:16:3e:f8:a7:c2", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fae8d88-2a", "ovs_interfaceid": "4fae8d88-2aaa-48bd-b0c4-72bc768efce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.666031] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.666031] env[68674]: value = "task-3240783" [ 1081.666031] env[68674]: _type = "Task" [ 1081.666031] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.677878] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240783, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.904008] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dd3fd1ff-16d9-42f9-8cbb-96f6b2770dd5 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-66f4ab32-ef66-4d1d-93b6-775d59ce3c41-dc748df7-150d-4b34-a259-782775725005" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.371s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.945206] env[68674]: DEBUG oslo_vmware.api [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240781, 'name': PowerOffVM_Task, 'duration_secs': 0.364294} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.945437] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1081.945610] env[68674]: DEBUG nova.compute.manager [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1081.948040] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bce30d-1ab9-40dc-9398-76a7bfa40826 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.979439] env[68674]: INFO nova.compute.manager [-] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Took 1.69 seconds to deallocate network for instance. [ 1081.998431] env[68674]: DEBUG nova.compute.utils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1082.003021] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1082.004185] env[68674]: DEBUG nova.network.neutron [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.058573] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095187} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.058573] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1082.059406] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e413a97f-66e8-4f5f-87d5-0472765ce89f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.067742] env[68674]: DEBUG nova.network.neutron [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updating instance_info_cache with network_info: [{"id": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "address": "fa:16:3e:c3:1f:45", "network": {"id": "1b0c763a-1b06-4dfb-9376-f9d411619180", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1366824526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "43f00e26b76347d0bd40df46ac3acbcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5b4166a-1c", "ovs_interfaceid": "d5b4166a-1c05-4ad8-a9f4-697517d72f6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.074058] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-bd3ae195-6e01-49d5-9fcf-9520273d9108-dc748df7-150d-4b34-a259-782775725005" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.074058] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-bd3ae195-6e01-49d5-9fcf-9520273d9108-dc748df7-150d-4b34-a259-782775725005" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.074058] env[68674]: DEBUG nova.objects.instance [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'flavor' on Instance uuid 
bd3ae195-6e01-49d5-9fcf-9520273d9108 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.099506] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d/95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1082.104174] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33a9fb38-a197-42d8-8c0f-630ff7c494d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.127756] env[68674]: DEBUG nova.policy [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7571ead1f304133b1e10221669af666', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fcfc3ecd6aa74705aefa88d7a95361a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1082.137715] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1082.137715] env[68674]: value = "task-3240784" [ 1082.137715] env[68674]: _type = "Task" [ 1082.137715] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.152309] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240784, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.162476] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-2a7a6269-65a8-402c-b174-a4a46d20a33a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.163508] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd9e721-1713-4548-9e11-c6d14082e01a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.170174] env[68674]: DEBUG nova.network.neutron [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updated VIF entry in instance network info cache for port 270836ed-f229-45ed-b23b-58f26fa997be. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.170557] env[68674]: DEBUG nova.network.neutron [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [{"id": "270836ed-f229-45ed-b23b-58f26fa997be", "address": "fa:16:3e:50:c3:4d", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap270836ed-f2", "ovs_interfaceid": "270836ed-f229-45ed-b23b-58f26fa997be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.182221] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec3ae7c-f3b9-4476-aa98-0c162f7c4c67 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.184658] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240783, 'name': CreateVM_Task} progress is 25%. 
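The large "Updating instance_info_cache with network_info: [...]" payloads above are the per-instance VIF list nova caches after talking to Neutron. Purely as an illustration of that structure (plain dict/list handling, not a nova API), the fields that usually matter when reading these entries can be pulled out like this; the sample values are copied from the caed484b-6fb0-41f2-a35f-8f85117dcf15 entry above:

    def summarize_network_info(network_info):
        # Walk a payload of the shape logged above and print port id, MAC
        # address, fixed IPs and floating IPs for each VIF.
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
            print(vif['id'], vif['address'], 'fixed:', fixed, 'floating:', floating)

    summarize_network_info([{
        'id': '01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17',
        'address': 'fa:16:3e:d4:a2:fc',
        'network': {'subnets': [{'ips': [{'address': '192.168.128.4',
                                          'floating_ips': []}]}]},
    }])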
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.323295] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c5b052-0015-4a02-8b07-4fe332d3f547 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.331437] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4e5c30-f72a-4c55-8bea-09f66dcd4be0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.364225] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56393092-29af-43b3-b64a-4e41039e1c3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.373336] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257889d9-c492-4443-a9f6-80f7ab956c86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.390842] env[68674]: DEBUG nova.compute.provider_tree [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1082.458730] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b83d8321-b75c-414c-b261-a1262f25206c tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.486489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.503505] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1082.541646] env[68674]: DEBUG nova.network.neutron [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Successfully created port: f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.575206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Releasing lock "refresh_cache-4214e971-ca72-4c9f-a355-78e5ad8d8219" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.575968] env[68674]: DEBUG nova.objects.instance [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lazy-loading 'flavor' on Instance uuid 4214e971-ca72-4c9f-a355-78e5ad8d8219 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.648715] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240784, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.682025] env[68674]: DEBUG oslo_concurrency.lockutils [req-e1264539-3cd4-4726-985f-a5a27dc09a53 req-5a8e3f44-3d45-4f80-88b2-3b0c6fccfbf6 service nova] Releasing lock "refresh_cache-66f4ab32-ef66-4d1d-93b6-775d59ce3c41" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.682025] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240783, 'name': CreateVM_Task, 'duration_secs': 0.900369} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.682025] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.682025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.682025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.682025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1082.682025] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf923041-5117-4ad4-a36a-2ed0761f9d0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.686855] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1082.686855] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521387c6-8fce-1ea5-f142-a6e42701aa8a" [ 1082.686855] env[68674]: _type = "Task" [ 1082.686855] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.695439] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521387c6-8fce-1ea5-f142-a6e42701aa8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.696942] env[68674]: DEBUG nova.objects.instance [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'pci_requests' on Instance uuid bd3ae195-6e01-49d5-9fcf-9520273d9108 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.914661] env[68674]: ERROR nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] [req-e7de38fc-aca8-4840-8bd3-8bcc565208e3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e7de38fc-aca8-4840-8bd3-8bcc565208e3"}]} [ 1082.938124] env[68674]: DEBUG nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1082.955315] env[68674]: DEBUG nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1082.955543] env[68674]: DEBUG nova.compute.provider_tree [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1082.970965] env[68674]: DEBUG nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 
tempest-InstanceActionsTestJSON-1591416933-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1082.991908] env[68674]: DEBUG nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1083.086024] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72af8abd-e68f-4e67-bb0c-3df1b3a7544b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.109508] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.113747] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3e44275-2111-49eb-844e-4f6ee1c052f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.122822] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1083.122822] env[68674]: value = "task-3240786" [ 1083.122822] env[68674]: _type = "Task" [ 1083.122822] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.135711] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240786, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.150672] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240784, 'name': ReconfigVM_Task, 'duration_secs': 0.857819} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.150996] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d/95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1083.151662] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-890565fa-a3d9-44c9-bd8d-c9da6c9466cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.158558] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1083.158558] env[68674]: value = "task-3240787" [ 1083.158558] env[68674]: _type = "Task" [ 1083.158558] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.167476] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240787, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.199402] env[68674]: DEBUG nova.objects.base [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1083.199655] env[68674]: DEBUG nova.network.neutron [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1083.201744] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]521387c6-8fce-1ea5-f142-a6e42701aa8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010995} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.202066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.202361] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.202534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.202675] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.202866] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.203462] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d390811-f0a4-4763-ad78-f4585524192c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.215317] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.215649] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.216336] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08e99e47-b06f-4a99-9ff0-3f311232682f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.224405] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1083.224405] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527a79bb-49c3-5349-ef85-c48befbb220f" [ 1083.224405] env[68674]: _type = "Task" [ 1083.224405] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.234850] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527a79bb-49c3-5349-ef85-c48befbb220f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.236793] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36990c39-6b97-4816-862f-1b45c9066e75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.243602] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2917b0-3685-41e2-a8f3-dff5278a8802 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.276052] env[68674]: DEBUG nova.policy [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd6c4d1912754a2ea44a65b455b7413c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21163cbc3a5a4dc3abc832c4560c33e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1083.278649] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cdc82b-8cb6-45e6-bf8f-9768aab89e19 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.286819] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592868ee-b800-4daf-86cc-f6b5a654600b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.303024] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.303024] env[68674]: DEBUG 
nova.compute.provider_tree [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1083.304070] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32f582f2-57f8-4f83-a87a-90006733bdc7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.312433] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1083.312433] env[68674]: value = "task-3240788" [ 1083.312433] env[68674]: _type = "Task" [ 1083.312433] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.326267] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240788, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.462054] env[68674]: DEBUG nova.compute.manager [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.462054] env[68674]: DEBUG nova.compute.manager [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing instance network info cache due to event network-changed-b3b3ebd7-0f64-4a86-b249-876c5962725c. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1083.462054] env[68674]: DEBUG oslo_concurrency.lockutils [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.462201] env[68674]: DEBUG oslo_concurrency.lockutils [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.462399] env[68674]: DEBUG nova.network.neutron [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.516885] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1083.547749] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.548133] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.548327] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.548551] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.548748] env[68674]: DEBUG nova.virt.hardware [None 
req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.548934] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.549063] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.549234] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.549401] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.549584] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.549734] env[68674]: DEBUG nova.virt.hardware [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.550638] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960c18f1-3e6a-4d4f-9540-923b679e5659 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.559927] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b355cd8-96a8-4366-bd3f-8426d2d2163a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.617388] env[68674]: DEBUG nova.objects.instance [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.632932] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240786, 'name': PowerOffVM_Task, 'duration_secs': 
0.251454} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.633865] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1083.640205] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1083.640805] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c696a15f-3e8e-4f7d-92c2-8b86c5ce49da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.660065] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1083.660065] env[68674]: value = "task-3240789" [ 1083.660065] env[68674]: _type = "Task" [ 1083.660065] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.671374] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240787, 'name': Rename_Task, 'duration_secs': 0.203826} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.674608] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1083.674871] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240789, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.675091] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d65db0e8-e05d-48df-acbc-a781b1a1c724 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.680696] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1083.680696] env[68674]: value = "task-3240790" [ 1083.680696] env[68674]: _type = "Task" [ 1083.680696] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.688973] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.735027] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527a79bb-49c3-5349-ef85-c48befbb220f, 'name': SearchDatastore_Task, 'duration_secs': 0.011647} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.735898] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60b0fc6d-41f2-4e9d-b02b-4481c45b9b29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.740944] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1083.740944] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52db156f-3c5c-37ed-d302-facdb774426f" [ 1083.740944] env[68674]: _type = "Task" [ 1083.740944] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.748848] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52db156f-3c5c-37ed-d302-facdb774426f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.823937] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240788, 'name': PowerOffVM_Task, 'duration_secs': 0.273266} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.824279] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1083.824995] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.825271] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.825375] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.825558] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.825706] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.825853] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.826074] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.826248] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 
tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.826421] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.826587] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.826788] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.831701] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-279d06a5-b89a-4edc-8f8e-17b165571362 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.842881] env[68674]: DEBUG nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1083.843244] env[68674]: DEBUG nova.compute.provider_tree [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updating resource provider ade3f042-7427-494b-9654-0b65e074850c generation from 154 to 155 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1083.843391] env[68674]: DEBUG nova.compute.provider_tree [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1083.852351] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 
tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1083.852351] env[68674]: value = "task-3240791" [ 1083.852351] env[68674]: _type = "Task" [ 1083.852351] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.860922] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240791, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.122358] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.122654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.122715] env[68674]: DEBUG nova.network.neutron [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1084.122865] env[68674]: DEBUG nova.objects.instance [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'info_cache' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.175990] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240789, 'name': ReconfigVM_Task, 'duration_secs': 0.300076} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.176297] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1084.176478] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1084.176732] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab63a31e-3526-4b40-9392-e74e39ddc8c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.185914] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1084.185914] env[68674]: value = "task-3240792" [ 1084.185914] env[68674]: _type = "Task" [ 1084.185914] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.191980] env[68674]: DEBUG oslo_vmware.api [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240790, 'name': PowerOnVM_Task, 'duration_secs': 0.508486} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.192584] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1084.192814] env[68674]: INFO nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Took 8.39 seconds to spawn the instance on the hypervisor. [ 1084.192978] env[68674]: DEBUG nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1084.193724] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8732e93b-09a7-4da3-a885-3ffa158fef93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.198949] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240792, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.252954] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52db156f-3c5c-37ed-d302-facdb774426f, 'name': SearchDatastore_Task, 'duration_secs': 0.010124} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.253195] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.253464] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] caed484b-6fb0-41f2-a35f-8f85117dcf15/caed484b-6fb0-41f2-a35f-8f85117dcf15.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.253747] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d888105a-5a54-4315-822f-1f8f6288a77f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.261201] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1084.261201] env[68674]: value = "task-3240793" [ 1084.261201] env[68674]: _type = "Task" [ 1084.261201] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.269744] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240793, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.350366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.856s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.355496] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.866s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.355496] env[68674]: DEBUG nova.objects.instance [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'resources' on Instance uuid f70145c9-4846-42e1-9c1c-de9759097abd {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.364445] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240791, 'name': ReconfigVM_Task, 'duration_secs': 0.149679} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.365542] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9a168d-75e8-469a-9b2f-acbf5ce924fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.390718] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1084.391537] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1084.391537] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1084.391537] env[68674]: DEBUG 
nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1084.391537] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1084.391718] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1084.391815] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1084.391974] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1084.392158] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1084.392335] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1084.392553] env[68674]: DEBUG nova.virt.hardware [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1084.394297] env[68674]: INFO nova.scheduler.client.report [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Deleted allocations for instance cbccde73-b903-47f7-9cbc-f0b376a03435 [ 1084.395335] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb4f9fc8-14ee-4e0e-b193-fe695bdf50bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.404181] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1084.404181] env[68674]: value = 
"session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7fcdd-658a-5ec7-651f-091b2241ac8c" [ 1084.404181] env[68674]: _type = "Task" [ 1084.404181] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.414752] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7fcdd-658a-5ec7-651f-091b2241ac8c, 'name': SearchDatastore_Task, 'duration_secs': 0.00848} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.420355] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1084.421058] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2925dd7d-8b49-46b9-92f9-2e8262f62846 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.435150] env[68674]: DEBUG nova.network.neutron [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Successfully updated port: f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.436981] env[68674]: DEBUG nova.network.neutron [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updated VIF entry in instance network info cache for port b3b3ebd7-0f64-4a86-b249-876c5962725c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.437341] env[68674]: DEBUG nova.network.neutron [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.443826] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1084.443826] env[68674]: value = "task-3240794" [ 1084.443826] env[68674]: _type = "Task" [ 1084.443826] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.452476] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240794, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.631149] env[68674]: DEBUG nova.objects.base [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Object Instance<142e8ede-90e2-47cf-a1b1-8c4fd59eed0a> lazy-loaded attributes: flavor,info_cache {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1084.700972] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240792, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.715189] env[68674]: INFO nova.compute.manager [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Took 19.45 seconds to build instance. 
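The records above and below repeat the same pattern: a vCenter task is invoked (PowerOnVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task), and the driver then logs "Waiting for the task ... to complete", "progress is N%", and finally "completed successfully". A minimal, self-contained sketch of that polling loop follows; the helper get_task_info and the dict it returns are hypothetical stand-ins for the vSphere TaskInfo lookup, not oslo.vmware APIs.

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter-style task reference until it reaches a terminal state.

    get_task_info(task_ref) is assumed to return a dict such as
    {'state': 'running', 'progress': 77}, {'state': 'success', 'result': ...},
    or {'state': 'error', 'error': '...'}.
    """
    while True:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            # corresponds to "... completed successfully." in the log
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # corresponds to "Task: {...} progress is N%." in the log
        print("Task %s progress is %s%%." % (task_ref, info.get('progress', 0)))
        time.sleep(poll_interval)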
[ 1084.775873] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240793, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.884463] env[68674]: DEBUG nova.compute.manager [req-0d788e8f-47e6-4615-8c0f-7160e90f9248 req-67195b17-0a75-43eb-909a-1c3fc90f8799 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-vif-plugged-dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.884703] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d788e8f-47e6-4615-8c0f-7160e90f9248 req-67195b17-0a75-43eb-909a-1c3fc90f8799 service nova] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.885398] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d788e8f-47e6-4615-8c0f-7160e90f9248 req-67195b17-0a75-43eb-909a-1c3fc90f8799 service nova] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.885534] env[68674]: DEBUG oslo_concurrency.lockutils [req-0d788e8f-47e6-4615-8c0f-7160e90f9248 req-67195b17-0a75-43eb-909a-1c3fc90f8799 service nova] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.885698] env[68674]: DEBUG nova.compute.manager [req-0d788e8f-47e6-4615-8c0f-7160e90f9248 req-67195b17-0a75-43eb-909a-1c3fc90f8799 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] No waiting events found dispatching network-vif-plugged-dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1084.885920] env[68674]: WARNING nova.compute.manager [req-0d788e8f-47e6-4615-8c0f-7160e90f9248 req-67195b17-0a75-43eb-909a-1c3fc90f8799 service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received unexpected event network-vif-plugged-dc748df7-150d-4b34-a259-782775725005 for instance with vm_state active and task_state None. 
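The earlier 409 from nova.scheduler.client.report ("placement.concurrent_update") and the later "generation from 154 to 155" records illustrate Placement's optimistic concurrency control: every inventory write carries the resource provider generation, a stale generation is rejected with 409, and the client refreshes its view of the provider and retries. A rough sketch of that refresh-and-retry loop, assuming the standard Placement REST endpoints; the base URL, token header, microversion, and retry count are illustrative assumptions, not values taken from this deployment.

import requests

PLACEMENT = "http://placement.example/placement"    # assumed base URL
HEADERS = {
    "X-Auth-Token": "ADMIN_TOKEN",                   # assumed credentials
    "OpenStack-API-Version": "placement 1.26",       # assumed microversion
}

def set_inventory(provider_uuid, inventories, retries=3):
    """PUT inventory with the current provider generation; on 409, refresh and retry."""
    for _ in range(retries):
        # Refresh the provider record to learn its current generation.
        rp = requests.get("%s/resource_providers/%s" % (PLACEMENT, provider_uuid),
                          headers=HEADERS)
        rp.raise_for_status()
        body = {"resource_provider_generation": rp.json()["generation"],
                "inventories": inventories}
        resp = requests.put("%s/resource_providers/%s/inventories" % (PLACEMENT, provider_uuid),
                            json=body, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()   # server bumps the generation (e.g. 154 -> 155)
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409 "placement.concurrent_update": another writer won the race; loop and retry.
    raise RuntimeError("inventory update kept conflicting for provider %s" % provider_uuid)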
[ 1084.887976] env[68674]: DEBUG nova.network.neutron [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Successfully updated port: dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.906411] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adaaf95b-ada0-464b-b63f-bf9c03222e50 tempest-InstanceActionsTestJSON-1591416933 tempest-InstanceActionsTestJSON-1591416933-project-member] Lock "cbccde73-b903-47f7-9cbc-f0b376a03435" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.718s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.939693] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.939850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.940014] env[68674]: DEBUG nova.network.neutron [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1084.941279] env[68674]: DEBUG oslo_concurrency.lockutils [req-7d0b8370-f446-4120-84f6-86c1e6628c13 req-cb38a4e5-f00c-4718-ae3a-7f8944e8104f service nova] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.964108] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240794, 'name': ReconfigVM_Task, 'duration_secs': 0.437949} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.967680] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1084.969317] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42f6a0e-7863-42f1-b6f2-030dd3175dfe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.015824] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.020779] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f2fc67-db2e-4f4b-bcc2-1391de7f39c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.047613] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1085.047613] env[68674]: value = "task-3240796" [ 1085.047613] env[68674]: _type = "Task" [ 1085.047613] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.057179] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240796, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.133628] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b953a00a-28f5-409d-a957-889fbc266fc1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.144952] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9bfbfa-600f-41a0-b82c-41ba45e18c2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.181687] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7423aa-ba9d-4341-81de-95f5ac64b073 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.193634] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6707842e-5103-47bd-b98c-481e5fa3df07 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.206797] env[68674]: DEBUG oslo_vmware.api [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240792, 'name': PowerOnVM_Task, 'duration_secs': 0.543165} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.214393] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.214667] env[68674]: DEBUG nova.compute.manager [None req-5b73aaa4-4564-41a6-9827-a925ae13bc95 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.215186] env[68674]: DEBUG nova.compute.provider_tree [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.217109] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ea3ebe-c484-46d7-bed8-ee52a98de506 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.220260] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8e383736-b85a-4cfc-b744-0f2e127475dc tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.968s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.272611] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544648} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.273666] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] caed484b-6fb0-41f2-a35f-8f85117dcf15/caed484b-6fb0-41f2-a35f-8f85117dcf15.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1085.273919] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.274213] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f3f05f2-296d-4413-b034-807e0b3fade4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.287329] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1085.287329] env[68674]: value = "task-3240797" [ 1085.287329] env[68674]: _type = "Task" [ 1085.287329] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.296509] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240797, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.391059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.391267] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.391455] env[68674]: DEBUG nova.network.neutron [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.497999] env[68674]: DEBUG nova.network.neutron [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating instance_info_cache with network_info: [{"id": "4d94c698-e74c-4238-8f2e-ead75015687e", "address": "fa:16:3e:2c:a1:73", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94c698-e7", "ovs_interfaceid": "4d94c698-e74c-4238-8f2e-ead75015687e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.503420] env[68674]: DEBUG nova.network.neutron [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.561923] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240796, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.574132] env[68674]: DEBUG nova.compute.manager [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-vif-plugged-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.574132] env[68674]: DEBUG oslo_concurrency.lockutils [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.574132] env[68674]: DEBUG oslo_concurrency.lockutils [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.574132] env[68674]: DEBUG oslo_concurrency.lockutils [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.574132] env[68674]: DEBUG nova.compute.manager [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] No waiting events found dispatching network-vif-plugged-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1085.574132] env[68674]: WARNING nova.compute.manager [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received unexpected event network-vif-plugged-f751e885-e868-4e41-a9e7-de64b20c643c for instance with vm_state building and task_state spawning. [ 1085.574132] env[68674]: DEBUG nova.compute.manager [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-changed-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.574132] env[68674]: DEBUG nova.compute.manager [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Refreshing instance network info cache due to event network-changed-f751e885-e868-4e41-a9e7-de64b20c643c. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1085.574132] env[68674]: DEBUG oslo_concurrency.lockutils [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.704045] env[68674]: DEBUG nova.network.neutron [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.723680] env[68674]: DEBUG nova.scheduler.client.report [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.798313] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195916} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.798592] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.799443] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6a47e8-04c8-4dc9-bfee-334810542845 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.822296] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] caed484b-6fb0-41f2-a35f-8f85117dcf15/caed484b-6fb0-41f2-a35f-8f85117dcf15.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.822626] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f52f141-58aa-4558-892a-d8701ad2a5f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.843879] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1085.843879] env[68674]: value = "task-3240798" [ 1085.843879] env[68674]: _type = "Task" [ 1085.843879] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.852679] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.932952] env[68674]: WARNING nova.network.neutron [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] a803f1d7-ea36-4d0a-9a85-9b7a8d27f698 already exists in list: networks containing: ['a803f1d7-ea36-4d0a-9a85-9b7a8d27f698']. ignoring it [ 1086.004736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.059708] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240796, 'name': ReconfigVM_Task, 'duration_secs': 0.610458} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.060110] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a/2a7a6269-65a8-402c-b174-a4a46d20a33a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.061029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dbee74-5132-4e8b-819a-8d3798f4f6b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.083915] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b92172-38b4-48b3-99e8-5dec23eb18ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.113603] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae9b525-f95e-4a9d-947b-51c7e0a62cc1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.136746] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751d7fdb-e4b1-499a-88fe-b8c5d3a6abf2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.146842] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.147455] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63501aee-2683-4c3d-b528-78e5c8e71814 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.154625] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1086.154625] env[68674]: value = "task-3240799" [ 1086.154625] env[68674]: _type = "Task" [ 1086.154625] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.163689] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240799, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.206486] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.206853] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance network_info: |[{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1086.207212] env[68674]: DEBUG oslo_concurrency.lockutils [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.207402] env[68674]: DEBUG nova.network.neutron [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Refreshing network info cache for port f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.208634] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:db:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f751e885-e868-4e41-a9e7-de64b20c643c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.219979] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 
tempest-ServersNegativeTestJSON-1761443692-project-member] Creating folder: Project (fcfc3ecd6aa74705aefa88d7a95361a0). Parent ref: group-v647377. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1086.221470] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ece3dccc-d6b6-4d5d-b5d4-1616566ab591 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.226670] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.874s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.245925] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Created folder: Project (fcfc3ecd6aa74705aefa88d7a95361a0) in parent group-v647377. [ 1086.245925] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Creating folder: Instances. Parent ref: group-v647683. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1086.246105] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98b60183-abce-4fde-b434-9749c759bcba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.257690] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Created folder: Instances in parent group-v647683. [ 1086.257690] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.257690] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.257690] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef2d861d-7d71-4484-ae71-270656a923e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.273840] env[68674]: INFO nova.scheduler.client.report [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted allocations for instance f70145c9-4846-42e1-9c1c-de9759097abd [ 1086.282200] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.282200] env[68674]: value = "task-3240802" [ 1086.282200] env[68674]: _type = "Task" [ 1086.282200] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.297025] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240802, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.357249] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.378236] env[68674]: DEBUG nova.network.neutron [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc748df7-150d-4b34-a259-782775725005", "address": "fa:16:3e:92:eb:c8", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc748df7-15", "ovs_interfaceid": "dc748df7-150d-4b34-a259-782775725005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.458328] env[68674]: DEBUG 
oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "4214e971-ca72-4c9f-a355-78e5ad8d8219" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.458681] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.458999] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "4214e971-ca72-4c9f-a355-78e5ad8d8219-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.459270] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.459492] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.462133] env[68674]: INFO nova.compute.manager [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Terminating instance [ 1086.560296] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.560455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.666958] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 
tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240799, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.790016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-807af09e-3a31-414a-a4a3-214b0209fc64 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "f70145c9-4846-42e1-9c1c-de9759097abd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.124s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.799663] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240802, 'name': CreateVM_Task, 'duration_secs': 0.438191} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.802477] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1086.803223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.803520] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.803858] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1086.804558] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d2e7ead-2e76-4842-8400-8ea1cc59e032 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.810743] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1086.810743] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d3e23a-66fa-7a9e-743b-e4cc27d7047e" [ 1086.810743] env[68674]: _type = "Task" [ 1086.810743] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.820742] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d3e23a-66fa-7a9e-743b-e4cc27d7047e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.855952] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240798, 'name': ReconfigVM_Task, 'duration_secs': 0.681748} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.856279] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Reconfigured VM instance instance-0000006d to attach disk [datastore2] caed484b-6fb0-41f2-a35f-8f85117dcf15/caed484b-6fb0-41f2-a35f-8f85117dcf15.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.856980] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a476ee1a-29d3-41c6-bde6-16241e684b0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.865803] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1086.865803] env[68674]: value = "task-3240803" [ 1086.865803] env[68674]: _type = "Task" [ 1086.865803] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.875207] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240803, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.882070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.882774] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.882992] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.883929] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18977963-99df-4577-99da-b160d5591955 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.903967] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1086.904283] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1086.904503] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1086.904720] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1086.904878] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1086.905063] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1086.905318] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1086.905492] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1086.905680] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1086.905912] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1086.906093] env[68674]: DEBUG nova.virt.hardware [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1086.912637] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Reconfiguring VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1086.913047] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bbcb469-e8ba-40fb-9bd2-9a755fb2e0cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.935356] env[68674]: DEBUG oslo_vmware.api [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1086.935356] env[68674]: value = "task-3240804" [ 1086.935356] env[68674]: _type = "Task" [ 1086.935356] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.944966] env[68674]: DEBUG oslo_vmware.api [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240804, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.963822] env[68674]: DEBUG nova.network.neutron [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updated VIF entry in instance network info cache for port f751e885-e868-4e41-a9e7-de64b20c643c. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1086.964245] env[68674]: DEBUG nova.network.neutron [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.965965] env[68674]: DEBUG nova.compute.manager [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1086.966254] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.967310] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfeb9fa-bed0-4c72-9e32-5b004ae5888a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.976318] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.977206] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30dd4750-7c6c-4ec8-a65e-744a63f6102d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.985783] env[68674]: DEBUG oslo_vmware.api [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1086.985783] env[68674]: value = "task-3240805" [ 1086.985783] env[68674]: _type = "Task" [ 1086.985783] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.995423] env[68674]: DEBUG oslo_vmware.api [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.010224] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.010542] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-581d790c-1332-4bbb-b024-cc6bd24a2201 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.020518] env[68674]: DEBUG oslo_vmware.api [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1087.020518] env[68674]: value = "task-3240806" [ 1087.020518] env[68674]: _type = "Task" [ 1087.020518] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.031781] env[68674]: DEBUG oslo_vmware.api [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240806, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.064770] env[68674]: DEBUG nova.compute.utils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1087.165617] env[68674]: DEBUG oslo_vmware.api [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240799, 'name': PowerOnVM_Task, 'duration_secs': 0.561862} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.165932] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.323819] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d3e23a-66fa-7a9e-743b-e4cc27d7047e, 'name': SearchDatastore_Task, 'duration_secs': 0.011638} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.324155] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.324437] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1087.324736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.324940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.325210] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.325523] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36a38231-a302-4fc2-ae9a-c544c5d5389a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.337456] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.337659] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1087.338437] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed406e94-510a-4b44-9bf6-130ba65bb5cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.348100] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1087.348100] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52776735-0614-4435-e2f8-948f18d68ef0" [ 1087.348100] env[68674]: _type = "Task" [ 1087.348100] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.357262] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52776735-0614-4435-e2f8-948f18d68ef0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.376787] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240803, 'name': Rename_Task, 'duration_secs': 0.214313} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.377159] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.377420] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-844943d2-3b6d-45ed-9621-e22a25e16431 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.386773] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1087.386773] env[68674]: value = "task-3240808" [ 1087.386773] env[68674]: _type = "Task" [ 1087.386773] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.396325] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240808, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.447840] env[68674]: DEBUG oslo_vmware.api [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240804, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.467859] env[68674]: DEBUG oslo_concurrency.lockutils [req-3d179b16-6d3d-4e4e-a325-bdc876da673d req-ce0c62a6-0d4b-403b-b387-f09ce94638ae service nova] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.497304] env[68674]: DEBUG oslo_vmware.api [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240805, 'name': PowerOffVM_Task, 'duration_secs': 0.418608} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.497595] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.497773] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.498055] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff1053ca-d1e9-44bb-88cb-c1c5289d8018 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.532334] env[68674]: DEBUG oslo_vmware.api [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240806, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.568442] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.591502] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.591775] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.592020] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Deleting the datastore file [datastore1] 4214e971-ca72-4c9f-a355-78e5ad8d8219 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.592354] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9425342b-d54c-4d3b-82bf-63721886a76c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.601674] env[68674]: DEBUG oslo_vmware.api [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1087.601674] env[68674]: value = "task-3240810" [ 1087.601674] env[68674]: 
_type = "Task" [ 1087.601674] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.611127] env[68674]: DEBUG oslo_vmware.api [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240810, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.859541] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52776735-0614-4435-e2f8-948f18d68ef0, 'name': SearchDatastore_Task, 'duration_secs': 0.022026} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.860336] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0334e755-fc34-4c08-9f4a-418e64a06300 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.866254] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1087.866254] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52795282-f9b0-d679-aaca-d4d5a66b3447" [ 1087.866254] env[68674]: _type = "Task" [ 1087.866254] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.874963] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52795282-f9b0-d679-aaca-d4d5a66b3447, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.896556] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240808, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.947330] env[68674]: DEBUG oslo_vmware.api [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240804, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.033774] env[68674]: DEBUG oslo_vmware.api [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240806, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.111625] env[68674]: DEBUG oslo_vmware.api [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.385711} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.111902] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.112100] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.112324] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.112513] env[68674]: INFO nova.compute.manager [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1088.112789] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.113009] env[68674]: DEBUG nova.compute.manager [-] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1088.113112] env[68674]: DEBUG nova.network.neutron [-] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1088.211465] env[68674]: INFO nova.compute.manager [None req-0270e9d1-fdd6-40a8-a2e5-cd29b8c0cbd7 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance to original state: 'active' [ 1088.377988] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52795282-f9b0-d679-aaca-d4d5a66b3447, 'name': SearchDatastore_Task, 'duration_secs': 0.023579} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.378223] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.378482] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1088.378734] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06eda19d-5cfa-4800-99cb-fcabdbfb9be0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.387230] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1088.387230] env[68674]: value = "task-3240811" [ 1088.387230] env[68674]: _type = "Task" [ 1088.387230] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.403166] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240811, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.403166] env[68674]: DEBUG oslo_vmware.api [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240808, 'name': PowerOnVM_Task, 'duration_secs': 0.667947} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.403166] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.403441] env[68674]: INFO nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Took 10.20 seconds to spawn the instance on the hypervisor. 
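[editorial sketch] The records above and below repeatedly show the oslo_vmware task cycle: a vCenter task (PowerOnVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, ...) is invoked, wait_for_task/_poll_task report "progress is N%" until the backend signals success, and the completed record carries a duration_secs. The snippet below is a minimal, self-contained approximation of that polling pattern, not the oslo.vmware implementation; FakeTask and poll_vcenter_task are hypothetical names introduced only for illustration.

# Editorial sketch (not oslo.vmware): approximates the wait_for_task /
# _poll_task cycle visible in the surrounding records, where a task is
# invoked, its progress is polled until success, and the elapsed time is
# reported as duration_secs. FakeTask and poll_vcenter_task are hypothetical.
import time

class FakeTask:
    """Stands in for a vCenter task handle such as task-3240806."""
    def __init__(self, task_id, steps=(0, 66, 100)):
        self.task_id = task_id
        self._steps = list(steps)

    def poll(self):
        # Each poll returns (state, progress); 'success' once progress is 100.
        progress = self._steps.pop(0) if self._steps else 100
        state = "success" if progress >= 100 else "running"
        return state, progress

def poll_vcenter_task(task, interval=0.5):
    """Poll a task until completion, mirroring the 'progress is N%' records."""
    started = time.time()
    while True:
        state, progress = task.poll()
        print("Task: {'id': %s} progress is %d%%." % (task.task_id, progress))
        if state == "success":
            duration = time.time() - started
            print("Task: {'id': %s} completed successfully "
                  "(duration_secs %.6f)." % (task.task_id, duration))
            return duration
        time.sleep(interval)

if __name__ == "__main__":
    poll_vcenter_task(FakeTask("task-3240806"))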
[ 1088.403441] env[68674]: DEBUG nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.404147] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7278dc-dba3-4d15-99b9-f6215f3d12dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.448844] env[68674]: DEBUG oslo_vmware.api [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240804, 'name': ReconfigVM_Task, 'duration_secs': 1.317784} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.448844] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.449115] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Reconfigured VM to attach interface {{(pid=68674) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1088.529308] env[68674]: DEBUG nova.compute.manager [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-changed-dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.529520] env[68674]: DEBUG nova.compute.manager [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing instance network info cache due to event network-changed-dc748df7-150d-4b34-a259-782775725005. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1088.529747] env[68674]: DEBUG oslo_concurrency.lockutils [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.529885] env[68674]: DEBUG oslo_concurrency.lockutils [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.530073] env[68674]: DEBUG nova.network.neutron [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Refreshing network info cache for port dc748df7-150d-4b34-a259-782775725005 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.535111] env[68674]: DEBUG oslo_vmware.api [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240806, 'name': PowerOnVM_Task, 'duration_secs': 1.128145} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.535660] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.535795] env[68674]: DEBUG nova.compute.manager [None req-d1ba35ac-6af6-4210-8b3e-d276db346db6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.537086] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208b63ae-0a79-4c9b-adf0-f4bbeca125ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.635327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.635646] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.635893] env[68674]: INFO nova.compute.manager [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] 
[instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Attaching volume 38e9aac1-85e2-4fee-b30d-95805ec4d8bb to /dev/sdb [ 1088.672897] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9ad2bf-8b9d-4f8b-95cf-5f5f96f31796 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.680742] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6598dd5-2709-49b5-be87-cd380d52ac2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.695050] env[68674]: DEBUG nova.virt.block_device [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Updating existing volume attachment record: 10c6f9da-dff4-47b7-8ff8-fd23026f3469 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1088.897876] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240811, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.903861] env[68674]: DEBUG nova.compute.manager [req-3cb766da-7936-4ac1-9de1-6a720725da8d req-9f7c8130-4bc1-45e2-b262-08e0023d66ab service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Received event network-vif-deleted-d5b4166a-1c05-4ad8-a9f4-697517d72f6c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.904073] env[68674]: INFO nova.compute.manager [req-3cb766da-7936-4ac1-9de1-6a720725da8d req-9f7c8130-4bc1-45e2-b262-08e0023d66ab service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Neutron deleted interface d5b4166a-1c05-4ad8-a9f4-697517d72f6c; detaching it from the instance and deleting it from the info cache [ 1088.904253] env[68674]: DEBUG nova.network.neutron [req-3cb766da-7936-4ac1-9de1-6a720725da8d req-9f7c8130-4bc1-45e2-b262-08e0023d66ab service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.923049] env[68674]: INFO nova.compute.manager [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Took 20.39 seconds to build instance. 
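[editorial sketch] The lockutils records around the reserve_block_device_name / attach_volume steps above follow one bookkeeping pattern: log how long the caller waited to acquire a named lock, run the critical section, then log how long the lock was held on release. The sketch below imitates that pattern with a plain threading.Lock; it is not oslo.concurrency itself, and timed_lock and _LOCKS are hypothetical helpers used only to illustrate the logged timings.

# Editorial sketch (not oslo.concurrency): approximates the lockutils
# wait/held bookkeeping seen in the neighbouring records.
import contextlib
import threading
import time

_LOCKS = {}

@contextlib.contextmanager
def timed_lock(name, caller):
    lock = _LOCKS.setdefault(name, threading.Lock())
    wait_start = time.time()
    lock.acquire()
    waited = time.time() - wait_start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
    held_start = time.time()
    try:
        yield
    finally:
        lock.release()
        held = time.time() - held_start
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))

if __name__ == "__main__":
    with timed_lock("95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d",
                    "ComputeManager.attach_volume..do_attach_volume"):
        time.sleep(0.1)  # stands in for the volume-attach work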
[ 1088.953554] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a98c1009-f36a-4fb2-84e1-4620c8928966 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-bd3ae195-6e01-49d5-9fcf-9520273d9108-dc748df7-150d-4b34-a259-782775725005" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.881s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.990926] env[68674]: DEBUG nova.network.neutron [-] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.328468] env[68674]: DEBUG nova.network.neutron [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updated VIF entry in instance network info cache for port dc748df7-150d-4b34-a259-782775725005. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.328468] env[68674]: DEBUG nova.network.neutron [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dc748df7-150d-4b34-a259-782775725005", "address": "fa:16:3e:92:eb:c8", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc748df7-15", "ovs_interfaceid": "dc748df7-150d-4b34-a259-782775725005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.399333] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240811, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.940529} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.399713] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1089.400374] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1089.400374] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-591c9214-d37d-453c-b2f9-efab5e331411 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.408209] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1089.408209] env[68674]: value = "task-3240813" [ 1089.408209] env[68674]: _type = "Task" [ 1089.408209] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.412617] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a21c609-4905-41c2-b76c-703655039c64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.424396] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240813, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.425252] env[68674]: DEBUG oslo_concurrency.lockutils [None req-64a47058-f0cb-4a99-bb84-c0407c2bc8d0 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.904s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.428441] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1058da1-5642-4549-b727-fe5790e677e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.469161] env[68674]: DEBUG nova.compute.manager [req-3cb766da-7936-4ac1-9de1-6a720725da8d req-9f7c8130-4bc1-45e2-b262-08e0023d66ab service nova] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Detach interface failed, port_id=d5b4166a-1c05-4ad8-a9f4-697517d72f6c, reason: Instance 4214e971-ca72-4c9f-a355-78e5ad8d8219 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1089.495307] env[68674]: INFO nova.compute.manager [-] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Took 1.38 seconds to deallocate network for instance. [ 1089.793762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.793762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.793762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.793762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.793762] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.794873] env[68674]: INFO nova.compute.manager [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Terminating instance [ 1089.831016] env[68674]: DEBUG oslo_concurrency.lockutils [req-d71d36cf-4240-45c5-bd7a-0592b4977eb6 req-f881fbe4-f944-4fea-9245-11252d6401dc service nova] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.919425] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240813, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110263} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.919702] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1089.920520] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb17a38-1040-462e-aa58-bf83d3f3d0fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.944574] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.946422] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d386a5b-cdd0-408a-9deb-5b9c7ecd211b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.974365] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1089.974365] env[68674]: value = "task-3240816" [ 1089.974365] env[68674]: _type = "Task" [ 1089.974365] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.986521] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240816, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.002476] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.002852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.003117] env[68674]: DEBUG nova.objects.instance [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lazy-loading 'resources' on Instance uuid 4214e971-ca72-4c9f-a355-78e5ad8d8219 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.299967] env[68674]: DEBUG nova.compute.manager [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.300257] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.300526] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6659156e-4e7c-4935-8976-c14368c044cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.310334] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1090.310334] env[68674]: value = "task-3240817" [ 1090.310334] env[68674]: _type = "Task" [ 1090.310334] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.319913] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240817, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.330406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.330703] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.465386] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "interface-bd3ae195-6e01-49d5-9fcf-9520273d9108-dc748df7-150d-4b34-a259-782775725005" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.465781] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-bd3ae195-6e01-49d5-9fcf-9520273d9108-dc748df7-150d-4b34-a259-782775725005" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.488313] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240816, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.720033] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e14c73f-7828-4b16-9ead-2971264007cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.728208] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ede1d2a-69b9-451a-b51e-3e07cc10cf78 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.760374] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756cc957-cc19-4815-b5ed-32ad6f535e73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.764720] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "caed484b-6fb0-41f2-a35f-8f85117dcf15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.764960] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.765285] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "caed484b-6fb0-41f2-a35f-8f85117dcf15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.765393] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.765527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.769688] env[68674]: INFO nova.compute.manager [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Terminating instance [ 1090.772119] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-470782cc-b705-4e93-88aa-35ed13bd448c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.789999] env[68674]: DEBUG nova.compute.provider_tree [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.820600] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240817, 'name': PowerOffVM_Task, 'duration_secs': 0.262509} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.820941] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.821244] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Volume detach. Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1090.821447] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647666', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'name': 'volume-b2708da3-c713-480b-98a2-3fe862ad5593', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '2a7a6269-65a8-402c-b174-a4a46d20a33a', 'attached_at': '2025-04-03T08:15:44.000000', 'detached_at': '', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'serial': 'b2708da3-c713-480b-98a2-3fe862ad5593'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1090.822270] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efca0cf-9219-4f94-bcc0-2eb931a92548 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.844963] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1090.848953] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812749ee-df81-45c0-9613-2edfaa794311 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.857674] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bfd3bb-a4e0-4fa8-ad28-0de5b5058483 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.879076] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d34fd9-2621-4eea-b9b0-f358fb2d5d1a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.895876] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] The volume has not been displaced from its original location: [datastore1] volume-b2708da3-c713-480b-98a2-3fe862ad5593/volume-b2708da3-c713-480b-98a2-3fe862ad5593.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1090.901530] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1090.902549] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e36d24aa-0a16-45a0-acda-c79dc2962b6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.922229] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1090.922229] env[68674]: value = "task-3240818" [ 1090.922229] env[68674]: _type = "Task" [ 1090.922229] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.932969] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240818, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.968986] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.969176] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.970101] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33112782-f39d-4b29-81c2-eb509bcffc43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.992854] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e588c75-a5b8-43f8-ad20-377ff6c90534 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.000950] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240816, 'name': ReconfigVM_Task, 'duration_secs': 0.671011} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.027659] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.034222] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Reconfiguring VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1091.034222] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14b44993-ec2a-427d-a24f-eb3b781cdaec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.036121] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5468504f-8b08-415e-a23e-b0056be17026 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.056872] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1091.056872] env[68674]: value = "task-3240820" [ 1091.056872] env[68674]: _type = "Task" [ 1091.056872] 
env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.058495] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1091.058495] env[68674]: value = "task-3240819" [ 1091.058495] env[68674]: _type = "Task" [ 1091.058495] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.071278] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.073384] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240819, 'name': Rename_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.279917] env[68674]: DEBUG nova.compute.manager [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.280203] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.281207] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7786c5c9-54d7-4cf7-a4ca-ad71442a287f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.290009] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.290391] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bda48726-e2be-44c6-a1a4-6b4f90f5c8e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.293193] env[68674]: DEBUG nova.scheduler.client.report [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.302643] env[68674]: DEBUG oslo_vmware.api [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1091.302643] env[68674]: value = "task-3240822" [ 1091.302643] env[68674]: _type = "Task" [ 1091.302643] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.313435] env[68674]: DEBUG oslo_vmware.api [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.365990] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.433101] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240818, 'name': ReconfigVM_Task, 'duration_secs': 0.240536} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.434033] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1091.439626] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff1ffa97-4ab7-436f-a46e-64e1e235a6ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.458135] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1091.458135] env[68674]: value = "task-3240823" [ 1091.458135] env[68674]: _type = "Task" [ 1091.458135] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.467295] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240823, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.572171] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.575459] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240819, 'name': Rename_Task, 'duration_secs': 0.205785} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.575778] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.576042] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cf15bb6-c127-46a3-8468-8755e31db0eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.584101] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1091.584101] env[68674]: value = "task-3240825" [ 1091.584101] env[68674]: _type = "Task" [ 1091.584101] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.593798] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240825, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.799299] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.796s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.801986] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.436s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.803544] env[68674]: INFO nova.compute.claims [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.817594] env[68674]: DEBUG oslo_vmware.api [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240822, 'name': PowerOffVM_Task, 'duration_secs': 0.322121} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.818049] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1091.818342] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.818724] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d539b111-4192-44c9-a159-5aa559958595 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.822092] env[68674]: INFO nova.scheduler.client.report [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Deleted allocations for instance 4214e971-ca72-4c9f-a355-78e5ad8d8219 [ 1091.900952] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.901214] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.901487] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore2] caed484b-6fb0-41f2-a35f-8f85117dcf15 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.901791] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa7e8aec-5f34-4b38-9c9f-1a9b708b8ccb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.910165] env[68674]: DEBUG oslo_vmware.api [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1091.910165] env[68674]: value = "task-3240827" [ 1091.910165] env[68674]: _type = "Task" [ 1091.910165] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.918588] env[68674]: DEBUG oslo_vmware.api [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240827, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.970129] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240823, 'name': ReconfigVM_Task, 'duration_secs': 0.147031} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.970449] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647666', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'name': 'volume-b2708da3-c713-480b-98a2-3fe862ad5593', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '2a7a6269-65a8-402c-b174-a4a46d20a33a', 'attached_at': '2025-04-03T08:15:44.000000', 'detached_at': '', 'volume_id': 'b2708da3-c713-480b-98a2-3fe862ad5593', 'serial': 'b2708da3-c713-480b-98a2-3fe862ad5593'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1091.970760] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.971621] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d32b82d-2074-40bb-9daa-85340ab3c7d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.979126] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.979357] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4521f18c-8cda-4ff1-8332-548720c37103 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.068393] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.068656] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.068894] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 
tempest-ServerActionsTestOtherB-589403679-project-member] Deleting the datastore file [datastore1] 2a7a6269-65a8-402c-b174-a4a46d20a33a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.069284] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0529afcc-d7f7-42a1-8c53-43a496b489b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.075120] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.081653] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1092.081653] env[68674]: value = "task-3240829" [ 1092.081653] env[68674]: _type = "Task" [ 1092.081653] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.093917] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.097039] env[68674]: DEBUG oslo_vmware.api [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240825, 'name': PowerOnVM_Task, 'duration_secs': 0.497997} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.097307] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.097512] env[68674]: INFO nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Took 8.58 seconds to spawn the instance on the hypervisor. 
[ 1092.097696] env[68674]: DEBUG nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.098463] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da9ac53-032e-4479-80e5-bc0c6a7d9a6f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.330561] env[68674]: DEBUG oslo_concurrency.lockutils [None req-21fcbad6-3b02-4e29-b016-59643eddee1d tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "4214e971-ca72-4c9f-a355-78e5ad8d8219" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.872s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.420268] env[68674]: DEBUG oslo_vmware.api [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239026} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.420636] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.420719] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.420907] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.421096] env[68674]: INFO nova.compute.manager [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1092.421350] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.421552] env[68674]: DEBUG nova.compute.manager [-] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.421643] env[68674]: DEBUG nova.network.neutron [-] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.572236] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.591343] env[68674]: DEBUG oslo_vmware.api [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259606} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.591593] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.591781] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.591960] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.592154] env[68674]: INFO nova.compute.manager [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Took 2.29 seconds to destroy the instance on the hypervisor. [ 1092.592390] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.592631] env[68674]: DEBUG nova.compute.manager [-] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.592719] env[68674]: DEBUG nova.network.neutron [-] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.617853] env[68674]: INFO nova.compute.manager [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Took 16.87 seconds to build instance. [ 1092.671779] env[68674]: DEBUG nova.compute.manager [req-dacd0e01-2ada-4239-8964-eb1bfe8f4101 req-f331b825-710b-4b42-ba6f-ea310481fb0d service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Received event network-vif-deleted-01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.671999] env[68674]: INFO nova.compute.manager [req-dacd0e01-2ada-4239-8964-eb1bfe8f4101 req-f331b825-710b-4b42-ba6f-ea310481fb0d service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Neutron deleted interface 01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17; detaching it from the instance and deleting it from the info cache [ 1092.672222] env[68674]: DEBUG nova.network.neutron [req-dacd0e01-2ada-4239-8964-eb1bfe8f4101 req-f331b825-710b-4b42-ba6f-ea310481fb0d service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.673557] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.673736] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.673921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.674118] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.674291] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.676152] env[68674]: INFO nova.compute.manager [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Terminating instance [ 1092.991902] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43540a35-3fa6-44f6-8307-6ad5b1610761 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.001271] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035a553f-ec92-4e82-a276-714bd0fb9843 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.037566] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c4b369-a7b9-4609-a76c-71c8371e24ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.041372] env[68674]: DEBUG nova.compute.manager [req-baf0525b-c8d2-4bc0-8070-64dffda46ac9 req-d5e250f2-81ee-46d7-8ea4-56add1cbf79d service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Received event network-vif-deleted-4fae8d88-2aaa-48bd-b0c4-72bc768efce3 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.041592] env[68674]: INFO nova.compute.manager [req-baf0525b-c8d2-4bc0-8070-64dffda46ac9 req-d5e250f2-81ee-46d7-8ea4-56add1cbf79d service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Neutron deleted interface 4fae8d88-2aaa-48bd-b0c4-72bc768efce3; detaching it from the instance and deleting it from the info cache [ 1093.041767] env[68674]: DEBUG nova.network.neutron [req-baf0525b-c8d2-4bc0-8070-64dffda46ac9 req-d5e250f2-81ee-46d7-8ea4-56add1cbf79d service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.049414] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b46ca7e-e1dc-4a18-9c6b-63f9876d3aac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.065791] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1093.076100] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.119669] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8097797b-92f5-4bdd-b2fa-e5a97c96d7b6 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.376s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.144740] env[68674]: DEBUG nova.network.neutron [-] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.178701] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caa3d62d-6118-495a-91ec-58e7b4e4e2c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.183666] env[68674]: DEBUG nova.compute.manager [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1093.183666] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.188341] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1016b4-f9a1-4a24-a843-aac30397039a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.203862] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4b921b-fa29-45c5-8800-583097de9128 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.215991] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.218633] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3e633b5-f369-4394-b24d-bbf06f0407c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.224868] env[68674]: DEBUG oslo_vmware.api [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1093.224868] env[68674]: value = "task-3240830" [ 1093.224868] env[68674]: _type = "Task" [ 1093.224868] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.246366] env[68674]: DEBUG oslo_vmware.api [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.259523] env[68674]: DEBUG nova.compute.manager [req-dacd0e01-2ada-4239-8964-eb1bfe8f4101 req-f331b825-710b-4b42-ba6f-ea310481fb0d service nova] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Detach interface failed, port_id=01a0c28d-5fb2-423a-a0d2-6ff82d2c0f17, reason: Instance caed484b-6fb0-41f2-a35f-8f85117dcf15 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1093.544972] env[68674]: DEBUG nova.network.neutron [-] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.545853] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a78bb858-be02-4bfd-b8f4-b6c258a3cda1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.568752] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83b74d4-67d7-49cb-b5f2-859bf25965ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.599812] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.607279] env[68674]: ERROR nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [req-9a0f707b-7762-4e87-a5ec-b2bbbf781ae4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9a0f707b-7762-4e87-a5ec-b2bbbf781ae4"}]} [ 1093.620795] env[68674]: DEBUG nova.compute.manager [req-baf0525b-c8d2-4bc0-8070-64dffda46ac9 req-d5e250f2-81ee-46d7-8ea4-56add1cbf79d service nova] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Detach interface failed, port_id=4fae8d88-2aaa-48bd-b0c4-72bc768efce3, reason: Instance 2a7a6269-65a8-402c-b174-a4a46d20a33a could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1093.623845] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1093.647674] env[68674]: INFO nova.compute.manager [-] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Took 1.23 seconds to deallocate network for instance. 
[ 1093.655581] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1093.655815] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1093.678706] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.678961] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.679262] env[68674]: INFO nova.compute.manager [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Rebooting instance [ 1093.684124] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1093.709175] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1093.735909] env[68674]: DEBUG oslo_vmware.api [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240830, 'name': PowerOffVM_Task, 'duration_secs': 0.298127} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.736202] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1093.736376] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1093.736630] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be8a43c7-3e30-419f-a5aa-0fef294caf60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.745043] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1093.745310] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647688', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'name': 'volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d', 'attached_at': '', 'detached_at': '', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'serial': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1093.746258] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396d0860-a682-406d-9ee5-1ebc4576c2b2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.768770] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb344935-f562-4afb-844d-644455ac23f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.796028] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 
volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb/volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.798853] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22632f93-9226-407c-a8ea-1e0622c2ccab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.822034] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1093.822259] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1093.822489] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Deleting the datastore file [datastore2] e9bebb3b-78ff-42b1-a350-efd1db5c6eaa {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.823814] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06bf9da8-4411-4f50-ad06-815e32639e38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.826028] env[68674]: DEBUG oslo_vmware.api [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1093.826028] env[68674]: value = "task-3240833" [ 1093.826028] env[68674]: _type = "Task" [ 1093.826028] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.837032] env[68674]: DEBUG oslo_vmware.api [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for the task: (returnval){ [ 1093.837032] env[68674]: value = "task-3240834" [ 1093.837032] env[68674]: _type = "Task" [ 1093.837032] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.840421] env[68674]: DEBUG oslo_vmware.api [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240833, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.853898] env[68674]: DEBUG oslo_vmware.api [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240834, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.961798] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43dab2a-dc96-4be7-a8a3-b2b523aa1bf9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.971282] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01264de9-233b-4edb-b864-836cf00a9fb9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.003057] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b75c3b-3115-4ec0-a1d6-676322da1690 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.011446] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65111344-5c42-407f-adde-58efe12988ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.027285] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.052596] env[68674]: INFO nova.compute.manager [-] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Took 1.46 seconds to deallocate network for instance. [ 1094.098428] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.155511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.195343] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.195534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.195739] env[68674]: DEBUG nova.network.neutron [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1094.340024] env[68674]: DEBUG oslo_vmware.api [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240833, 'name': ReconfigVM_Task, 'duration_secs': 0.477884} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.340024] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb/volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.344416] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4262246c-7441-4a2e-83e9-33abafbf476a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.365147] env[68674]: DEBUG oslo_vmware.api [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Task: {'id': task-3240834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331185} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.366493] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.366685] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1094.366911] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1094.367140] env[68674]: INFO nova.compute.manager [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1094.367394] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.367648] env[68674]: DEBUG oslo_vmware.api [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1094.367648] env[68674]: value = "task-3240835" [ 1094.367648] env[68674]: _type = "Task" [ 1094.367648] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.367850] env[68674]: DEBUG nova.compute.manager [-] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1094.367956] env[68674]: DEBUG nova.network.neutron [-] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1094.379166] env[68674]: DEBUG oslo_vmware.api [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240835, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.551911] env[68674]: ERROR nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [req-f24c315d-33f6-42a9-a12b-dbd4bcb942cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID ade3f042-7427-494b-9654-0b65e074850c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f24c315d-33f6-42a9-a12b-dbd4bcb942cf"}]} [ 1094.562695] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "d77d24ac-b44d-4014-83eb-f486db74ab0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.562947] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.570975] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1094.593342] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1094.593596] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.606667] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.609576] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1094.612243] env[68674]: INFO nova.compute.manager [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Took 0.56 seconds to detach 1 volumes for instance. [ 1094.634666] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1094.854057] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6265e12-757f-497e-b291-3e799dbeeb62 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.864230] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54568510-f982-45c4-893c-8db19efc14eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.906944] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e588cc04-06cb-4253-91a5-af1f11e3a3b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.912348] env[68674]: DEBUG oslo_vmware.api [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240835, 'name': ReconfigVM_Task, 'duration_secs': 0.160319} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.912939] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647688', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'name': 'volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d', 'attached_at': '', 'detached_at': '', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'serial': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1094.917657] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e285551-1743-4d9d-b4ae-53b10f2178b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.932859] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.976665] env[68674]: DEBUG nova.network.neutron [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.058387] env[68674]: DEBUG nova.compute.manager [req-d3d5abc0-23af-4d58-a42a-2c158e982904 req-3c93c0da-0698-4b42-a647-43dac753a5e6 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Received event network-vif-deleted-e9c61ddf-0f69-4349-8c95-a0246ea52982 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.058547] env[68674]: INFO nova.compute.manager [req-d3d5abc0-23af-4d58-a42a-2c158e982904 req-3c93c0da-0698-4b42-a647-43dac753a5e6 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Neutron deleted interface e9c61ddf-0f69-4349-8c95-a0246ea52982; detaching it from the instance and deleting it from the info cache [ 1095.058737] env[68674]: DEBUG nova.network.neutron [req-d3d5abc0-23af-4d58-a42a-2c158e982904 req-3c93c0da-0698-4b42-a647-43dac753a5e6 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.065425] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1095.097700] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.120244] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.120537] env[68674]: DEBUG nova.network.neutron [-] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.463691] env[68674]: DEBUG nova.scheduler.client.report [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updated inventory for provider ade3f042-7427-494b-9654-0b65e074850c with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1095.463948] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating resource provider 
ade3f042-7427-494b-9654-0b65e074850c generation from 157 to 158 during operation: update_inventory {{(pid=68674) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1095.464150] env[68674]: DEBUG nova.compute.provider_tree [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.479293] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.561505] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca73f3a3-e3ba-45ae-ab12-9c1bb4c5c96d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.574696] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d95eecf-b241-4655-bcd2-69e57dc18d99 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.601242] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.605527] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.619274] env[68674]: DEBUG nova.compute.manager [req-d3d5abc0-23af-4d58-a42a-2c158e982904 req-3c93c0da-0698-4b42-a647-43dac753a5e6 service nova] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Detach interface failed, port_id=e9c61ddf-0f69-4349-8c95-a0246ea52982, reason: Instance e9bebb3b-78ff-42b1-a350-efd1db5c6eaa could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1095.623110] env[68674]: INFO nova.compute.manager [-] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Took 1.25 seconds to deallocate network for instance. 
[ 1095.954344] env[68674]: DEBUG nova.objects.instance [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'flavor' on Instance uuid 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.969046] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.167s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.969560] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1095.972641] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.817s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.972641] env[68674]: DEBUG nova.objects.instance [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid caed484b-6fb0-41f2-a35f-8f85117dcf15 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.982655] env[68674]: DEBUG nova.compute.manager [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.983506] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6436624-5e07-4c9c-81db-2a72f7bb0996 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.101269] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.128831] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.460510] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d34cef2b-56e8-4e42-9360-c96ede400c2f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.825s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.475073] env[68674]: DEBUG nova.compute.utils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1096.480280] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1096.480280] env[68674]: DEBUG nova.network.neutron [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1096.517634] env[68674]: DEBUG nova.policy [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b3a4c2c5bae41998d58a116e648883d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa34d6d90c6d46aaa2cb77259b5e0c27', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1096.602518] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.697270] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8084a328-4918-43c1-bc19-041a6de1afb8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.707538] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f17250a-b643-4435-b41a-ce3c4970a3ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.740157] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7646731-1bc8-4af7-b2b4-3a74bb11da46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.748819] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdbf2ee-b08e-4f71-b4c5-526ff829387d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.764215] env[68674]: DEBUG nova.compute.provider_tree [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.866139] env[68674]: DEBUG nova.network.neutron [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Successfully created port: f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.980244] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1097.002599] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f5ab71-26e8-4112-8c14-77efc3862290 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.011573] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Doing hard reboot of VM {{(pid=68674) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1097.011855] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-dd537031-88dd-44cb-93f4-7825cd10e882 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.020286] env[68674]: DEBUG oslo_vmware.api [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1097.020286] env[68674]: value = "task-3240837" [ 1097.020286] env[68674]: _type = "Task" [ 1097.020286] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.030834] env[68674]: DEBUG oslo_vmware.api [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240837, 'name': ResetVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.055038] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.055310] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.055526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.055805] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.056025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.058062] env[68674]: INFO nova.compute.manager [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Terminating instance [ 1097.102925] env[68674]: DEBUG oslo_vmware.api [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240820, 'name': ReconfigVM_Task, 'duration_secs': 5.799079} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.103183] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.103392] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Reconfigured VM to detach interface {{(pid=68674) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1097.267275] env[68674]: DEBUG nova.scheduler.client.report [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1097.534176] env[68674]: DEBUG oslo_vmware.api [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240837, 'name': ResetVM_Task, 'duration_secs': 0.105074} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.534456] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Did hard reboot of VM {{(pid=68674) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1097.534704] env[68674]: DEBUG nova.compute.manager [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.535554] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bbe31f-fb52-4007-9193-03b9068242ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.561544] env[68674]: DEBUG nova.compute.manager [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.561677] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.561917] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-805be616-9b34-42ee-9f22-0451a4ddc3fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.571783] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1097.571783] env[68674]: value = "task-3240838" [ 1097.571783] env[68674]: _type = "Task" [ 1097.571783] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.581385] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240838, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.773066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.801s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.775352] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.655s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.775685] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.777610] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.172s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.779559] env[68674]: INFO nova.compute.claims [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.803491] env[68674]: INFO nova.scheduler.client.report [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted allocations for instance 2a7a6269-65a8-402c-b174-a4a46d20a33a [ 1097.807220] env[68674]: INFO nova.scheduler.client.report [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance caed484b-6fb0-41f2-a35f-8f85117dcf15 [ 1097.993424] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1098.019892] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.020172] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.020334] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.020515] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.020663] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.020812] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.021033] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.021204] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.021478] env[68674]: DEBUG nova.virt.hardware [None 
req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.021651] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.021895] env[68674]: DEBUG nova.virt.hardware [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.022740] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68b7c23-e6c3-4b2f-9114-bd74ee15d129 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.031831] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3da8fb0-314b-4f17-a2b2-c27cee8b240d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.053091] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b4a64045-00a9-49a6-889e-0f55df737a48 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.374s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.083988] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240838, 'name': PowerOffVM_Task, 'duration_secs': 0.253673} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.083988] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.083988] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1098.083988] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647688', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'name': 'volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d', 'attached_at': '', 'detached_at': '', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'serial': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1098.084859] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf315c3-678c-485e-bd4a-9719fbeab413 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.107504] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79a097b-ff23-4db0-8603-e2d1f78e91d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.115837] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9892e5f1-bd6e-4eff-afce-2e695f8885ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.141760] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e68da7d-832b-433b-8aef-d1d6f0fd91c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.166686] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] The volume has not been displaced from its original location: [datastore1] volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb/volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1098.175543] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1098.176308] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6001579a-357a-4083-b3f7-852363e1b77f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.199064] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1098.199064] env[68674]: value = "task-3240839" [ 1098.199064] env[68674]: _type = "Task" [ 1098.199064] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.207934] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240839, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.314083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-17aa7253-46cb-456c-9d5b-267b92d2f8d0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "2a7a6269-65a8-402c-b174-a4a46d20a33a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.522s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.318992] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a140c3a0-65de-4426-a310-485e6cdad007 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "caed484b-6fb0-41f2-a35f-8f85117dcf15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.552s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.396829] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.397056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquired lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.397257] env[68674]: DEBUG nova.network.neutron [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Building network info cache for 
instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.542110] env[68674]: DEBUG nova.network.neutron [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Successfully updated port: f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.570102] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.570394] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.629169] env[68674]: DEBUG nova.compute.manager [req-dc9d187f-a763-417a-a3ba-38f877d8f239 req-6c698626-13a4-4629-86d1-4bfcd6d396fb service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Received event network-vif-plugged-f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.629169] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc9d187f-a763-417a-a3ba-38f877d8f239 req-6c698626-13a4-4629-86d1-4bfcd6d396fb service nova] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.629169] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc9d187f-a763-417a-a3ba-38f877d8f239 req-6c698626-13a4-4629-86d1-4bfcd6d396fb service nova] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.629169] env[68674]: DEBUG oslo_concurrency.lockutils [req-dc9d187f-a763-417a-a3ba-38f877d8f239 req-6c698626-13a4-4629-86d1-4bfcd6d396fb service nova] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.629169] env[68674]: DEBUG nova.compute.manager [req-dc9d187f-a763-417a-a3ba-38f877d8f239 req-6c698626-13a4-4629-86d1-4bfcd6d396fb service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] No waiting events found dispatching network-vif-plugged-f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1098.629169] env[68674]: WARNING nova.compute.manager [req-dc9d187f-a763-417a-a3ba-38f877d8f239 req-6c698626-13a4-4629-86d1-4bfcd6d396fb service nova] [instance: 
5384c82b-a584-430f-8ef1-e2731562b5ff] Received unexpected event network-vif-plugged-f536bcae-200f-4668-94f8-520d08d06653 for instance with vm_state building and task_state spawning. [ 1098.709948] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240839, 'name': ReconfigVM_Task, 'duration_secs': 0.270712} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.710267] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1098.715418] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfbf4c3e-8c51-4c9b-a9bd-86e60039fa34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.732397] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1098.732397] env[68674]: value = "task-3240840" [ 1098.732397] env[68674]: _type = "Task" [ 1098.732397] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.741438] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240840, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.008689] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63cb502-bf02-4850-a146-4a03c820253a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.018080] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24348087-a212-4cf1-8621-6f8cb2ed886d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.058682] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.059041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.059289] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.059748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.059748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.061689] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.061824] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] 
Acquired lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.061978] env[68674]: DEBUG nova.network.neutron [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.065947] env[68674]: INFO nova.compute.manager [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Terminating instance [ 1099.072098] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de45037a-cd99-47ef-8aae-212d55d0129f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.076096] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1099.093600] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c57d24-2fa3-4f58-9e1c-cb100a4f3ec6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.098524] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.098829] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.112441] env[68674]: DEBUG nova.compute.provider_tree [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.196605] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.196850] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.244307] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240840, 'name': ReconfigVM_Task, 'duration_secs': 0.167938} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.244536] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647688', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'name': 'volume-38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d', 'attached_at': '', 'detached_at': '', 'volume_id': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb', 'serial': '38e9aac1-85e2-4fee-b30d-95805ec4d8bb'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1099.244918] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.245735] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2edd4e-73c1-409b-ba8c-86621e82ebe2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.252722] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1099.252947] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8677efc8-d5d1-4e1b-bfe3-507b2dd7234b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.348300] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1099.348591] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Deleting contents of the VM 
from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1099.348734] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleting the datastore file [datastore2] 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1099.349134] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4a53808-720c-409c-abe7-ac42a33bebb6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.357587] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1099.357587] env[68674]: value = "task-3240842" [ 1099.357587] env[68674]: _type = "Task" [ 1099.357587] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.366093] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.383599] env[68674]: INFO nova.network.neutron [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Port dc748df7-150d-4b34-a259-782775725005 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1099.384018] env[68674]: DEBUG nova.network.neutron [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [{"id": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "address": "fa:16:3e:ca:a0:f1", "network": {"id": "a803f1d7-ea36-4d0a-9a85-9b7a8d27f698", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-772405508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21163cbc3a5a4dc3abc832c4560c33e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b3ebd7-0f", "ovs_interfaceid": "b3b3ebd7-0f64-4a86-b249-876c5962725c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.585377] env[68674]: DEBUG nova.compute.manager [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1099.585666] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.589037] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349dc461-c287-4066-82fe-8ba31eccd0ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.595247] env[68674]: DEBUG nova.network.neutron [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.599832] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.600177] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0ac811f-4ef0-4ace-ba95-364aa31e86d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.602242] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1099.607015] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.612030] env[68674]: DEBUG oslo_vmware.api [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1099.612030] env[68674]: value = "task-3240843" [ 1099.612030] env[68674]: _type = "Task" [ 1099.612030] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.617923] env[68674]: DEBUG nova.scheduler.client.report [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.628077] env[68674]: DEBUG oslo_vmware.api [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.699280] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1099.806236] env[68674]: DEBUG nova.network.neutron [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updating instance_info_cache with network_info: [{"id": "f536bcae-200f-4668-94f8-520d08d06653", "address": "fa:16:3e:4a:df:86", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf536bcae-20", "ovs_interfaceid": "f536bcae-200f-4668-94f8-520d08d06653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.868565] env[68674]: DEBUG oslo_vmware.api [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32971} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.868820] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1099.868999] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1099.869220] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1099.869396] env[68674]: INFO nova.compute.manager [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Took 2.31 seconds to destroy the instance on the hypervisor. 
[ 1099.869636] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1099.869838] env[68674]: DEBUG nova.compute.manager [-] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1099.869976] env[68674]: DEBUG nova.network.neutron [-] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1099.886489] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Releasing lock "refresh_cache-bd3ae195-6e01-49d5-9fcf-9520273d9108" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.121861] env[68674]: DEBUG oslo_vmware.api [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240843, 'name': PowerOffVM_Task, 'duration_secs': 0.21156} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.122295] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.122446] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1100.123350] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.123982] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.124453] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1100.127223] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6af475b8-b937-4f87-9ceb-6f1000b506ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.128852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.129079] env[68674]: DEBUG nova.objects.instance [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lazy-loading 'resources' on Instance uuid e9bebb3b-78ff-42b1-a350-efd1db5c6eaa {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.219012] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1100.219282] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1100.219471] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleting the datastore file [datastore1] bd3ae195-6e01-49d5-9fcf-9520273d9108 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1100.220475] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.220723] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-813ea165-e6cf-403e-8bcf-49f389d1ef02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.239278] env[68674]: DEBUG oslo_vmware.api [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1100.239278] env[68674]: value = "task-3240845" [ 1100.239278] env[68674]: _type = "Task" [ 1100.239278] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.247366] env[68674]: DEBUG oslo_vmware.api [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.308627] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.309049] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Instance network_info: |[{"id": "f536bcae-200f-4668-94f8-520d08d06653", "address": "fa:16:3e:4a:df:86", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf536bcae-20", "ovs_interfaceid": "f536bcae-200f-4668-94f8-520d08d06653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1100.309529] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:df:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b9aabc7c-0f6c-42eb-bd27-493a1496c0c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f536bcae-200f-4668-94f8-520d08d06653', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.317526] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.317764] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.318132] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-836a5432-acd1-4e7b-b611-8f1e8df62a41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.342255] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.342255] env[68674]: value = "task-3240846" [ 1100.342255] env[68674]: _type = "Task" [ 1100.342255] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.350887] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240846, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.390209] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a88e7e19-fe05-4c97-8d90-05d7e22dd6de tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "interface-bd3ae195-6e01-49d5-9fcf-9520273d9108-dc748df7-150d-4b34-a259-782775725005" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.924s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.589844] env[68674]: DEBUG nova.network.neutron [-] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.629903] env[68674]: DEBUG nova.compute.utils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1100.631206] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1100.631426] env[68674]: DEBUG nova.network.neutron [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1100.665059] env[68674]: DEBUG nova.compute.manager [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Received event network-changed-f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1100.665347] env[68674]: DEBUG nova.compute.manager [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Refreshing instance network info cache due to event network-changed-f536bcae-200f-4668-94f8-520d08d06653. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1100.665452] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] Acquiring lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.665553] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] Acquired lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.665707] env[68674]: DEBUG nova.network.neutron [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Refreshing network info cache for port f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.687534] env[68674]: DEBUG nova.policy [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7571ead1f304133b1e10221669af666', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fcfc3ecd6aa74705aefa88d7a95361a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1100.749150] env[68674]: DEBUG oslo_vmware.api [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178036} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.751607] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1100.751800] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1100.752255] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1100.752255] env[68674]: INFO nova.compute.manager [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1100.752417] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.752903] env[68674]: DEBUG nova.compute.manager [-] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1100.753016] env[68674]: DEBUG nova.network.neutron [-] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1100.850628] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671c879c-c38c-49d2-bf9c-e74174b92090 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.864453] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240846, 'name': CreateVM_Task, 'duration_secs': 0.344044} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.865498] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d99db4-2ba2-43e6-817f-655aac5f2e88 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.868445] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1100.869324] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.869514] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.869826] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1100.870407] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6038c693-486e-45f7-9449-e86960e394c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.904195] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf46b1c-3d70-447a-812d-bdb36b99ebe6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.906294] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1100.906294] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c0b7ad-0354-4b86-7e22-13000cb18248" [ 1100.906294] env[68674]: _type = "Task" [ 1100.906294] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.913587] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13a931a-ea62-4600-bc3c-ac87d0c949b0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.921160] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c0b7ad-0354-4b86-7e22-13000cb18248, 'name': SearchDatastore_Task, 'duration_secs': 0.010175} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.924117] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.924406] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1100.924671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.924894] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.925110] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1100.925397] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e790fe65-b352-4c12-b1d2-f6273449b336 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.937388] env[68674]: DEBUG nova.compute.provider_tree [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.946693] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1100.946897] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1100.947663] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2208717-0488-414c-b134-d86bc1f4a499 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.954294] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1100.954294] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526bd1da-9933-fcf9-0366-8bca53c73457" [ 1100.954294] env[68674]: _type = "Task" [ 1100.954294] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.965466] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526bd1da-9933-fcf9-0366-8bca53c73457, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.998787] env[68674]: DEBUG nova.network.neutron [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Successfully created port: b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1101.092397] env[68674]: INFO nova.compute.manager [-] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Took 1.22 seconds to deallocate network for instance. [ 1101.137567] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1101.440864] env[68674]: DEBUG nova.scheduler.client.report [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.464660] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526bd1da-9933-fcf9-0366-8bca53c73457, 'name': SearchDatastore_Task, 'duration_secs': 0.011404} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.466241] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aa77726-225d-4463-9135-8e6051eb4d7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.473195] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1101.473195] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7d576-ce91-daca-e270-a5db821e638b" [ 1101.473195] env[68674]: _type = "Task" [ 1101.473195] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.482774] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7d576-ce91-daca-e270-a5db821e638b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.486110] env[68674]: DEBUG nova.network.neutron [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updated VIF entry in instance network info cache for port f536bcae-200f-4668-94f8-520d08d06653. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1101.486644] env[68674]: DEBUG nova.network.neutron [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updating instance_info_cache with network_info: [{"id": "f536bcae-200f-4668-94f8-520d08d06653", "address": "fa:16:3e:4a:df:86", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf536bcae-20", "ovs_interfaceid": "f536bcae-200f-4668-94f8-520d08d06653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.653782] env[68674]: INFO nova.compute.manager [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Took 0.56 seconds to detach 1 volumes for instance. 
[ 1101.676041] env[68674]: DEBUG nova.compute.manager [req-d44ecc2b-1884-4a17-845b-4326d074e077 req-45f4f3cd-65f3-4909-8d49-7671731a8cfc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Received event network-vif-deleted-b3b3ebd7-0f64-4a86-b249-876c5962725c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1101.676265] env[68674]: INFO nova.compute.manager [req-d44ecc2b-1884-4a17-845b-4326d074e077 req-45f4f3cd-65f3-4909-8d49-7671731a8cfc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Neutron deleted interface b3b3ebd7-0f64-4a86-b249-876c5962725c; detaching it from the instance and deleting it from the info cache [ 1101.676598] env[68674]: DEBUG nova.network.neutron [req-d44ecc2b-1884-4a17-845b-4326d074e077 req-45f4f3cd-65f3-4909-8d49-7671731a8cfc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.948347] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.950848] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.344s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.952361] env[68674]: INFO nova.compute.claims [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.970282] env[68674]: INFO nova.scheduler.client.report [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Deleted allocations for instance e9bebb3b-78ff-42b1-a350-efd1db5c6eaa [ 1101.984415] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a7d576-ce91-daca-e270-a5db821e638b, 'name': SearchDatastore_Task, 'duration_secs': 0.01868} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.984676] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.984971] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5384c82b-a584-430f-8ef1-e2731562b5ff/5384c82b-a584-430f-8ef1-e2731562b5ff.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1101.985619] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35a87037-fc82-47fe-be6a-77020d96193f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.990167] env[68674]: DEBUG oslo_concurrency.lockutils [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] Releasing lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.990361] env[68674]: DEBUG nova.compute.manager [req-dd288c0e-feec-40e4-9b4a-698e9667174d req-f1cf2453-59ff-4ebe-b6fc-c180792f726f service nova] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Received event network-vif-deleted-9fd41ca9-247f-4f5f-8749-60983c1e212a {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1101.993924] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1101.993924] env[68674]: value = "task-3240847" [ 1101.993924] env[68674]: _type = "Task" [ 1101.993924] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.003737] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.150110] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1102.152721] env[68674]: DEBUG nova.network.neutron [-] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.161465] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.180399] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1102.180664] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1102.180845] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1102.181070] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1102.181240] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1102.181411] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1102.181633] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 
tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1102.181901] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1102.181976] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1102.182259] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1102.182367] env[68674]: DEBUG nova.virt.hardware [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1102.184542] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5fed26-ed6a-4e25-9922-cb5ef998f968 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.187018] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba3f42b2-3d21-4c5b-9df2-204a9dff3280 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.200740] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b07824-d779-4a6e-b7d8-cdecfafa55be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.210845] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ab11e8-04ef-447b-8259-bbff4ecb9b06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.250867] env[68674]: DEBUG nova.compute.manager [req-d44ecc2b-1884-4a17-845b-4326d074e077 req-45f4f3cd-65f3-4909-8d49-7671731a8cfc service nova] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Detach interface failed, port_id=b3b3ebd7-0f64-4a86-b249-876c5962725c, reason: Instance bd3ae195-6e01-49d5-9fcf-9520273d9108 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1102.424348] env[68674]: DEBUG nova.compute.manager [req-89d6929c-3344-4123-8bce-46a77e3b9bd4 req-72fba1a3-d7b3-4d9c-8b59-5c4cfa2c376d service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Received event network-vif-plugged-b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.424754] env[68674]: DEBUG oslo_concurrency.lockutils [req-89d6929c-3344-4123-8bce-46a77e3b9bd4 req-72fba1a3-d7b3-4d9c-8b59-5c4cfa2c376d service nova] Acquiring lock "d77d24ac-b44d-4014-83eb-f486db74ab0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.425086] env[68674]: DEBUG oslo_concurrency.lockutils [req-89d6929c-3344-4123-8bce-46a77e3b9bd4 req-72fba1a3-d7b3-4d9c-8b59-5c4cfa2c376d service nova] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.425269] env[68674]: DEBUG oslo_concurrency.lockutils [req-89d6929c-3344-4123-8bce-46a77e3b9bd4 req-72fba1a3-d7b3-4d9c-8b59-5c4cfa2c376d service nova] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.425505] env[68674]: DEBUG nova.compute.manager [req-89d6929c-3344-4123-8bce-46a77e3b9bd4 req-72fba1a3-d7b3-4d9c-8b59-5c4cfa2c376d service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] No waiting events found dispatching network-vif-plugged-b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1102.425913] env[68674]: WARNING nova.compute.manager [req-89d6929c-3344-4123-8bce-46a77e3b9bd4 req-72fba1a3-d7b3-4d9c-8b59-5c4cfa2c376d service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Received unexpected event network-vif-plugged-b383a330-22c5-4c9b-8d66-f4916b6f0977 for instance with vm_state building and task_state spawning. [ 1102.482290] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5e5c5139-7f5c-4755-96c3-c88e2502f173 tempest-ServerRescueTestJSON-24621824 tempest-ServerRescueTestJSON-24621824-project-member] Lock "e9bebb3b-78ff-42b1-a350-efd1db5c6eaa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.808s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.505870] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240847, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482792} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.506899] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5384c82b-a584-430f-8ef1-e2731562b5ff/5384c82b-a584-430f-8ef1-e2731562b5ff.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1102.506899] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1102.506899] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bd4cfe0-cdd7-4561-9d54-06038854e0af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.514908] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1102.514908] env[68674]: value = "task-3240848" [ 1102.514908] env[68674]: _type = "Task" [ 1102.514908] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.525428] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240848, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.655469] env[68674]: INFO nova.compute.manager [-] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Took 1.90 seconds to deallocate network for instance. [ 1103.022455] env[68674]: DEBUG nova.network.neutron [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Successfully updated port: b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1103.032132] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240848, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063326} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.032616] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1103.034078] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356dfc8d-9984-4627-9cef-acf756409194 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.044291] env[68674]: DEBUG nova.compute.manager [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Received event network-changed-b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1103.044406] env[68674]: DEBUG nova.compute.manager [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Refreshing instance network info cache due to event network-changed-b383a330-22c5-4c9b-8d66-f4916b6f0977. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1103.044627] env[68674]: DEBUG oslo_concurrency.lockutils [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] Acquiring lock "refresh_cache-d77d24ac-b44d-4014-83eb-f486db74ab0b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.045088] env[68674]: DEBUG oslo_concurrency.lockutils [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] Acquired lock "refresh_cache-d77d24ac-b44d-4014-83eb-f486db74ab0b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.045088] env[68674]: DEBUG nova.network.neutron [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Refreshing network info cache for port b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1103.069463] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 5384c82b-a584-430f-8ef1-e2731562b5ff/5384c82b-a584-430f-8ef1-e2731562b5ff.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.073490] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42e9d4a2-138a-46ca-ba5d-7e2128cf379c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.095719] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1103.095719] env[68674]: 
value = "task-3240849" [ 1103.095719] env[68674]: _type = "Task" [ 1103.095719] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.106867] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240849, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.163040] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.246693] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a32453-fab5-4d48-869b-ab01d890e860 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.255907] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47ec017-de58-4fc5-bc64-d06d79178eb0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.288786] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bd91eb-3307-4746-8f80-562fb5d1331c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.297400] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bd10e2-db41-43bb-8685-ca3c0b3d0066 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.312568] env[68674]: DEBUG nova.compute.provider_tree [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.365769] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.366112] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.366224] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.367019] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68674) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.367019] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Cleaning up deleted instances with incomplete migration {{(pid=68674) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1103.525367] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "refresh_cache-d77d24ac-b44d-4014-83eb-f486db74ab0b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.584646] env[68674]: DEBUG nova.network.neutron [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1103.608116] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240849, 'name': ReconfigVM_Task, 'duration_secs': 0.329274} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.608449] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 5384c82b-a584-430f-8ef1-e2731562b5ff/5384c82b-a584-430f-8ef1-e2731562b5ff.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.609256] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8527d7c6-ec88-40ff-80f4-dafda328eec0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.617949] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1103.617949] env[68674]: value = "task-3240850" [ 1103.617949] env[68674]: _type = "Task" [ 1103.617949] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.628872] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240850, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.744325] env[68674]: DEBUG nova.network.neutron [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.816320] env[68674]: DEBUG nova.scheduler.client.report [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1104.127863] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240850, 'name': Rename_Task, 'duration_secs': 0.174972} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.128169] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.128414] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bb609a9-f243-49f2-a2ec-740684befa1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.136456] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1104.136456] env[68674]: value = "task-3240851" [ 1104.136456] env[68674]: _type = "Task" [ 1104.136456] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.144418] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240851, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.247387] env[68674]: DEBUG oslo_concurrency.lockutils [req-017794cc-8bb4-4a86-bedc-c362eb4f3112 req-38cec04c-42fd-4cc9-8790-71ef4180e32b service nova] Releasing lock "refresh_cache-d77d24ac-b44d-4014-83eb-f486db74ab0b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.247774] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "refresh_cache-d77d24ac-b44d-4014-83eb-f486db74ab0b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.247938] env[68674]: DEBUG nova.network.neutron [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.320940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.321448] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1104.324848] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.201s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.326972] env[68674]: INFO nova.compute.claims [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1104.651132] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240851, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.780978] env[68674]: DEBUG nova.network.neutron [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1104.826449] env[68674]: DEBUG nova.compute.utils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.831033] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.831033] env[68674]: DEBUG nova.network.neutron [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.867715] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.867883] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1104.889071] env[68674]: DEBUG nova.policy [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5080a4f68ef1482caaee5aa26614e6f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c958fcb56a934ef7919b76aa2a193429', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.924127] env[68674]: DEBUG nova.network.neutron [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Updating instance_info_cache with network_info: [{"id": "b383a330-22c5-4c9b-8d66-f4916b6f0977", "address": "fa:16:3e:37:94:df", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", 
"external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb383a330-22", "ovs_interfaceid": "b383a330-22c5-4c9b-8d66-f4916b6f0977", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.151456] env[68674]: DEBUG oslo_vmware.api [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240851, 'name': PowerOnVM_Task, 'duration_secs': 0.711884} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.151853] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.152171] env[68674]: INFO nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Took 7.16 seconds to spawn the instance on the hypervisor. [ 1105.152441] env[68674]: DEBUG nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.153571] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853cf049-260d-49d1-bace-281ff5e999d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.217425] env[68674]: DEBUG nova.network.neutron [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Successfully created port: 15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1105.333702] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1105.361071] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.365756] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.426689] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "refresh_cache-d77d24ac-b44d-4014-83eb-f486db74ab0b" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.427061] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Instance network_info: |[{"id": "b383a330-22c5-4c9b-8d66-f4916b6f0977", "address": "fa:16:3e:37:94:df", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb383a330-22", "ovs_interfaceid": "b383a330-22c5-4c9b-8d66-f4916b6f0977", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1105.427478] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:94:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b383a330-22c5-4c9b-8d66-f4916b6f0977', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.435436] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.437965] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.438668] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a6ba4b8-f590-490b-9c14-8457b131061f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.462335] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.462335] env[68674]: value = "task-3240852" [ 1105.462335] env[68674]: _type = "Task" [ 1105.462335] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.472155] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240852, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.574028] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915c35f3-4b9b-4f8a-a3ed-1ebab53888d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.582414] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2582b95e-d740-44c2-a2b0-1b25bfc5752f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.615196] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725c55f1-1c30-4d1c-a2be-31c5265d0706 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.623285] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87289d00-4026-41e6-96fc-5098bb393107 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.639990] env[68674]: DEBUG nova.compute.provider_tree [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.674776] env[68674]: INFO nova.compute.manager [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Took 14.32 seconds to build instance. 
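The 5384c82b spawn recorded above walks the usual cached-image path: copy the image VMDK out of devstack-image-cache_base, extend the copied root disk, reconfigure the VM to attach it, rename, and power on. A minimal sketch of the two VirtualDiskManager steps follows, assuming a connected oslo_vmware VMwareAPISession passed in as session, the datacenter and virtualDiskManager morefs as inputs, and the vSphere-documented parameter names; this is a sketch of the pattern, not Nova's vm_util code.

def copy_and_extend_root_disk(session, disk_mgr, datacenter,
                              cached_vmdk, instance_vmdk, new_capacity_kb):
    # CopyVirtualDisk_Task: cached image -> instance directory, e.g.
    # [datastore2] devstack-image-cache_base/<image>.vmdk ->
    # [datastore2] <instance-uuid>/<instance-uuid>.vmdk
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=cached_vmdk,
                              sourceDatacenter=datacenter,
                              destName=instance_vmdk,
                              destDatacenter=datacenter)
    session.wait_for_task(task)
    # ExtendVirtualDisk_Task: grow the copy to the flavor's root disk size
    # (the log shows 1048576 KB, i.e. a 1 GiB root disk for m1.nano).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=instance_vmdk,
                              datacenter=datacenter,
                              newCapacityKb=new_capacity_kb,
                              eagerZero=False)
    session.wait_for_task(task)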
[ 1105.842289] env[68674]: INFO nova.virt.block_device [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Booting with volume ac75a73a-75e6-493d-9caf-c35a3091e391 at /dev/sda [ 1105.884354] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-310cfa05-ee31-467f-8f58-efa89d094f06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.894709] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663b71a0-05f1-4f1b-b58e-b974af91c629 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.927974] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02ab7301-986b-4f46-aa7c-ec6139cedc48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.937267] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871935bd-c7a3-4f4e-989c-23595086125e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.974768] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca58f83-9b79-428a-abd1-4b508a664be3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.988102] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240852, 'name': CreateVM_Task, 'duration_secs': 0.415959} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.988373] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.989131] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a323d508-dfd1-44c9-a34b-f96512a5d867 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.992147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.992323] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.992673] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1105.992988] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf3ee675-0305-4cfe-ae58-a4335866f3ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.998374] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1105.998374] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5209c5c3-8b74-d925-a2d9-f3fd20305e46" [ 1105.998374] env[68674]: _type = "Task" [ 1105.998374] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.007068] env[68674]: DEBUG nova.virt.block_device [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating existing volume attachment record: 1a9c5916-5bf8-4965-ba43-560c7a54afe2 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1106.012103] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5209c5c3-8b74-d925-a2d9-f3fd20305e46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.017749] env[68674]: DEBUG nova.compute.manager [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Received event network-changed-f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.018345] env[68674]: DEBUG nova.compute.manager [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Refreshing instance network info cache due to event network-changed-f536bcae-200f-4668-94f8-520d08d06653. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1106.018345] env[68674]: DEBUG oslo_concurrency.lockutils [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] Acquiring lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.018604] env[68674]: DEBUG oslo_concurrency.lockutils [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] Acquired lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.018604] env[68674]: DEBUG nova.network.neutron [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Refreshing network info cache for port f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.143216] env[68674]: DEBUG nova.scheduler.client.report [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.177200] env[68674]: DEBUG oslo_concurrency.lockutils [None req-d2accf42-fe09-4290-91d8-39bbe64b6f6f tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.846s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.366376] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.509438] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 
tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5209c5c3-8b74-d925-a2d9-f3fd20305e46, 'name': SearchDatastore_Task, 'duration_secs': 0.023219} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.509749] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.509979] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.510803] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.510803] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.510803] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.510803] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3f1034e-399b-405c-a6d0-ac566a0d98ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.520760] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.520953] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.524142] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-324644cb-2646-4d9a-a547-5a13fc433c51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.530585] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1106.530585] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52908c03-a3fb-9711-7a70-8050b89847d5" [ 1106.530585] env[68674]: _type = "Task" [ 1106.530585] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.539819] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52908c03-a3fb-9711-7a70-8050b89847d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.650056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.650056] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1106.651844] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.431s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.656605] env[68674]: INFO nova.compute.claims [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.676537] env[68674]: DEBUG nova.compute.manager [req-c66d237e-d511-450c-b985-2cbb450303a9 req-80c673be-f464-413b-89bb-18a809750643 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Received event network-vif-plugged-15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.676766] env[68674]: DEBUG oslo_concurrency.lockutils [req-c66d237e-d511-450c-b985-2cbb450303a9 req-80c673be-f464-413b-89bb-18a809750643 service nova] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.676971] env[68674]: DEBUG oslo_concurrency.lockutils [req-c66d237e-d511-450c-b985-2cbb450303a9 req-80c673be-f464-413b-89bb-18a809750643 service nova] Lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.677250] env[68674]: DEBUG oslo_concurrency.lockutils [req-c66d237e-d511-450c-b985-2cbb450303a9 req-80c673be-f464-413b-89bb-18a809750643 service nova] Lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.677332] env[68674]: DEBUG nova.compute.manager [req-c66d237e-d511-450c-b985-2cbb450303a9 req-80c673be-f464-413b-89bb-18a809750643 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] No waiting events found dispatching network-vif-plugged-15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1106.677512] env[68674]: WARNING nova.compute.manager [req-c66d237e-d511-450c-b985-2cbb450303a9 req-80c673be-f464-413b-89bb-18a809750643 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Received unexpected event network-vif-plugged-15b18361-a1c9-4dab-bcaf-6a40837d6bbe for instance with vm_state building and task_state block_device_mapping. 
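The lock traffic above (lockutils.py:313/316/334 for the context-manager form, 405/410/424 for the decorator form) comes from oslo.concurrency's named in-process locks, which Nova uses to serialize sections such as ResourceTracker.instance_claim, the per-instance "refresh_cache-<uuid>" refreshes, and the "<uuid>-events" handling in InstanceEvents.pop_instance_event. A minimal sketch of the two usage patterns follows; only the semaphore names are taken from the log, the functions are hypothetical stand-ins rather than Nova code, and details may vary between oslo.concurrency releases.

    # Sketch of the oslo.concurrency named-lock patterns behind the
    # "Acquiring lock" / "acquired ... :: waited" / "released ... :: held"
    # DEBUG lines above. Function bodies are placeholders, not Nova code.
    from oslo_concurrency import lockutils

    # Decorator form: every call sharing the semaphore name "compute_resources"
    # is serialized, and acquire/release timings are logged at DEBUG.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        return instance_uuid  # resource-tracker style bookkeeping would go here

    # Context-manager form: ad-hoc critical sections such as the per-instance
    # "refresh_cache-<uuid>" locks seen in the log.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here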
[ 1106.757826] env[68674]: DEBUG nova.network.neutron [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Successfully updated port: 15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1106.759559] env[68674]: DEBUG nova.network.neutron [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updated VIF entry in instance network info cache for port f536bcae-200f-4668-94f8-520d08d06653. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.759907] env[68674]: DEBUG nova.network.neutron [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updating instance_info_cache with network_info: [{"id": "f536bcae-200f-4668-94f8-520d08d06653", "address": "fa:16:3e:4a:df:86", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf536bcae-20", "ovs_interfaceid": "f536bcae-200f-4668-94f8-520d08d06653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.041497] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52908c03-a3fb-9711-7a70-8050b89847d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010937} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.042287] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c435ee0a-7a34-4cfe-8309-e653c67c312d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.047329] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1107.047329] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8d1ca-ad3e-e9b5-48ef-f21d5fcaa0b8" [ 1107.047329] env[68674]: _type = "Task" [ 1107.047329] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.054632] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8d1ca-ad3e-e9b5-48ef-f21d5fcaa0b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.161185] env[68674]: DEBUG nova.compute.utils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1107.165483] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1107.165483] env[68674]: DEBUG nova.network.neutron [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1107.213256] env[68674]: DEBUG nova.policy [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e268da8edd47413b9b87909dde064f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cee54e456084086866d08b098a24b64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1107.262131] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.262394] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.262623] env[68674]: DEBUG nova.network.neutron [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1107.265666] env[68674]: DEBUG oslo_concurrency.lockutils [req-ee045ca6-af35-4d18-85a6-f5886141688a req-e4219e69-9ff3-415c-9d62-2806575bb9c5 service nova] Releasing lock "refresh_cache-5384c82b-a584-430f-8ef1-e2731562b5ff" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.483655] env[68674]: DEBUG nova.network.neutron [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Successfully created port: bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.558325] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f8d1ca-ad3e-e9b5-48ef-f21d5fcaa0b8, 'name': SearchDatastore_Task, 'duration_secs': 0.014332} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.558561] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.558828] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d77d24ac-b44d-4014-83eb-f486db74ab0b/d77d24ac-b44d-4014-83eb-f486db74ab0b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.559110] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-058e70d2-c00f-4733-b921-21d67efa3ce7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.566168] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1107.566168] env[68674]: value = "task-3240853" [ 1107.566168] env[68674]: _type = "Task" [ 1107.566168] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.574332] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240853, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.665707] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1107.795434] env[68674]: DEBUG nova.network.neutron [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1107.850268] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b909ee-dfb0-4162-b056-e1b17a745689 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.858739] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d62cd06-c8b3-4f8d-b828-c4a586db6efa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.893843] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7050491-799a-4591-96d5-e2c59441bac4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.904988] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3b308c-3115-42ab-9e2b-ce88a74fd187 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.919587] env[68674]: DEBUG nova.compute.provider_tree [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.942440] env[68674]: DEBUG nova.network.neutron [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.078654] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240853, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.106585] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1108.107202] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1108.107432] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.107639] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1108.107889] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.108071] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1108.108246] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1108.108464] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1108.108622] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 
tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1108.108789] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1108.109052] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1108.109128] env[68674]: DEBUG nova.virt.hardware [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1108.110036] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da27dbb-b31f-4153-8897-72a09c904c5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.118938] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ec6ef6-744e-43f7-b0af-7a2b42aa38b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.366290] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.422646] env[68674]: DEBUG nova.scheduler.client.report [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.445016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.445448] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Instance network_info: |[{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": 
{"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1108.446391] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:3b:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15b18361-a1c9-4dab-bcaf-6a40837d6bbe', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.454850] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.455499] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1108.455803] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab30814c-da16-436d-ba0a-f20e14310e0c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.481291] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.481291] env[68674]: value = "task-3240854" [ 1108.481291] env[68674]: _type = "Task" [ 1108.481291] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.490488] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240854, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.581285] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240853, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.678075] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1108.709037] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1108.709380] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.709558] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1108.709745] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.709894] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1108.710056] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1108.710269] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1108.710446] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 
tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1108.710687] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1108.711023] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1108.711179] env[68674]: DEBUG nova.virt.hardware [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1108.712059] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb302bd7-bd02-44cc-9ab4-f071d6e42493 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.717985] env[68674]: DEBUG nova.compute.manager [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Received event network-changed-15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.718434] env[68674]: DEBUG nova.compute.manager [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Refreshing instance network info cache due to event network-changed-15b18361-a1c9-4dab-bcaf-6a40837d6bbe. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1108.718434] env[68674]: DEBUG oslo_concurrency.lockutils [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] Acquiring lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.718595] env[68674]: DEBUG oslo_concurrency.lockutils [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] Acquired lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.718711] env[68674]: DEBUG nova.network.neutron [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Refreshing network info cache for port 15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.726723] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd01c64-53b0-4742-b278-6bf40f1eec13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.852828] env[68674]: DEBUG nova.compute.manager [req-6b010549-ede2-4c20-8d13-29e75a850837 req-574f4df2-28dc-48d6-aa35-8443a58f4e42 service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Received event network-vif-plugged-bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.852828] env[68674]: DEBUG oslo_concurrency.lockutils [req-6b010549-ede2-4c20-8d13-29e75a850837 req-574f4df2-28dc-48d6-aa35-8443a58f4e42 service nova] Acquiring lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.853103] env[68674]: DEBUG oslo_concurrency.lockutils [req-6b010549-ede2-4c20-8d13-29e75a850837 req-574f4df2-28dc-48d6-aa35-8443a58f4e42 service nova] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.853301] env[68674]: DEBUG oslo_concurrency.lockutils [req-6b010549-ede2-4c20-8d13-29e75a850837 req-574f4df2-28dc-48d6-aa35-8443a58f4e42 service nova] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.853491] env[68674]: DEBUG nova.compute.manager [req-6b010549-ede2-4c20-8d13-29e75a850837 req-574f4df2-28dc-48d6-aa35-8443a58f4e42 service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] No waiting events found dispatching network-vif-plugged-bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1108.853662] env[68674]: WARNING nova.compute.manager [req-6b010549-ede2-4c20-8d13-29e75a850837 req-574f4df2-28dc-48d6-aa35-8443a58f4e42 service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Received unexpected event 
network-vif-plugged-bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 for instance with vm_state building and task_state spawning. [ 1108.869442] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.930196] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.930806] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1108.934205] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.773s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.934477] env[68674]: DEBUG nova.objects.instance [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'resources' on Instance uuid 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.949616] env[68674]: DEBUG nova.network.neutron [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Successfully updated port: bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1108.992691] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240854, 'name': CreateVM_Task, 'duration_secs': 0.385571} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.992867] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.993580] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'attachment_id': '1a9c5916-5bf8-4965-ba43-560c7a54afe2', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647686', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'name': 'volume-ac75a73a-75e6-493d-9caf-c35a3091e391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c876b288-de2a-4195-bfef-88f38e219d9a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'serial': 'ac75a73a-75e6-493d-9caf-c35a3091e391'}, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=68674) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1108.993795] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Root volume attach. Driver type: vmdk {{(pid=68674) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1108.994620] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241793b9-8dd3-463b-81b5-71800a2bac39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.003059] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb04f93-4e77-46c3-9ba4-a3ce679042bb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.009417] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6b2e22-9ece-4078-b1de-d355dfe484f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.015628] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6faf1037-36ad-42da-bfb8-5dac9e9568c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.022962] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1109.022962] env[68674]: value = "task-3240855" [ 1109.022962] env[68674]: _type = "Task" [ 1109.022962] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.030929] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240855, 'name': RelocateVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.077367] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240853, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.152977} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.077642] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] d77d24ac-b44d-4014-83eb-f486db74ab0b/d77d24ac-b44d-4014-83eb-f486db74ab0b.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1109.077862] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1109.078139] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-510f8d6e-e83c-4a58-8ec9-660dcbb03fa5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.085446] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1109.085446] env[68674]: value = "task-3240856" [ 1109.085446] env[68674]: _type = "Task" [ 1109.085446] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.094268] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240856, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.437807] env[68674]: DEBUG nova.compute.utils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.441937] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.442132] env[68674]: DEBUG nova.network.neutron [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.451852] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "refresh_cache-0de73fea-8e2d-47ff-a87e-a83708f9b4ad" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.452007] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "refresh_cache-0de73fea-8e2d-47ff-a87e-a83708f9b4ad" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.452169] env[68674]: DEBUG nova.network.neutron [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.466080] env[68674]: DEBUG nova.network.neutron [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updated VIF entry in instance network info cache for port 15b18361-a1c9-4dab-bcaf-6a40837d6bbe. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.466435] env[68674]: DEBUG nova.network.neutron [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.495570] env[68674]: DEBUG nova.policy [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ce343abf0f14bb5b5141c50113ccf6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61ea6bfeb37d470a970e9c98e4827ade', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.533663] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240855, 'name': RelocateVM_Task, 'duration_secs': 0.457425} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.535995] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1109.536313] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647686', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'name': 'volume-ac75a73a-75e6-493d-9caf-c35a3091e391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c876b288-de2a-4195-bfef-88f38e219d9a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'serial': 'ac75a73a-75e6-493d-9caf-c35a3091e391'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1109.537362] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa6b881-6168-4c84-a200-6eb95499fff2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.557471] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ed43e6-c564-42fa-b071-da699cf73934 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.579671] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-ac75a73a-75e6-493d-9caf-c35a3091e391/volume-ac75a73a-75e6-493d-9caf-c35a3091e391.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.582466] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-601c2db5-778a-4b6f-b127-41856fe351c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.608012] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078185} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.609353] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1109.609705] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1109.609705] env[68674]: value = "task-3240857" [ 1109.609705] env[68674]: _type = "Task" [ 1109.609705] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.613020] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac6ec58-4567-451a-a648-bee466f04a98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.623653] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240857, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.641895] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] d77d24ac-b44d-4014-83eb-f486db74ab0b/d77d24ac-b44d-4014-83eb-f486db74ab0b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.644727] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1b4f461-69ba-42d9-9e69-db184441db54 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.665176] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1109.665176] env[68674]: value = "task-3240858" [ 1109.665176] env[68674]: _type = "Task" [ 1109.665176] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.675723] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240858, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.723832] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0818c5-8607-40c8-8f3b-505efeb0f78c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.731288] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28555618-f2b2-434a-8949-1b35f7b75961 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.762594] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c78d7b-b193-40e4-88f7-4c86c21a22b1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.770186] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d133f6cf-606a-4f6f-8342-a93d8e1af257 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.783574] env[68674]: DEBUG nova.compute.provider_tree [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.796821] env[68674]: DEBUG nova.network.neutron [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Successfully created port: 0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1109.942729] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1109.968946] env[68674]: DEBUG oslo_concurrency.lockutils [req-0e488907-9f24-414a-8fc3-79514f40ed5d req-dfa28faf-1a03-4ea9-b908-1067df3ddf49 service nova] Releasing lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.987388] env[68674]: DEBUG nova.network.neutron [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1110.122731] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240857, 'name': ReconfigVM_Task, 'duration_secs': 0.283813} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.123056] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-ac75a73a-75e6-493d-9caf-c35a3091e391/volume-ac75a73a-75e6-493d-9caf-c35a3091e391.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.128570] env[68674]: DEBUG nova.network.neutron [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Updating instance_info_cache with network_info: [{"id": "bc3acca0-ced5-49ff-9839-c40f7e6bc6f9", "address": "fa:16:3e:45:7a:70", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3acca0-ce", "ovs_interfaceid": "bc3acca0-ced5-49ff-9839-c40f7e6bc6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.129763] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7bf34ac-ad07-41c7-b774-a01277ed5383 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.145911] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1110.145911] env[68674]: value = "task-3240859" [ 1110.145911] env[68674]: _type = "Task" [ 1110.145911] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.154159] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240859, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.173688] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240858, 'name': ReconfigVM_Task, 'duration_secs': 0.366311} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.173999] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Reconfigured VM instance instance-00000070 to attach disk [datastore2] d77d24ac-b44d-4014-83eb-f486db74ab0b/d77d24ac-b44d-4014-83eb-f486db74ab0b.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.174648] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cdfa0d1-008b-43b9-b7d4-8383bdaa30bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.180840] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1110.180840] env[68674]: value = "task-3240860" [ 1110.180840] env[68674]: _type = "Task" [ 1110.180840] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.188253] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240860, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.286730] env[68674]: DEBUG nova.scheduler.client.report [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.640661] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "refresh_cache-0de73fea-8e2d-47ff-a87e-a83708f9b4ad" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.641012] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Instance network_info: |[{"id": "bc3acca0-ced5-49ff-9839-c40f7e6bc6f9", "address": "fa:16:3e:45:7a:70", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3acca0-ce", "ovs_interfaceid": "bc3acca0-ced5-49ff-9839-c40f7e6bc6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1110.641425] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:7a:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc3acca0-ced5-49ff-9839-c40f7e6bc6f9', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.649216] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.649498] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1110.653016] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7760a839-e993-478b-8b90-5eee73684076 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.673015] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240859, 'name': ReconfigVM_Task, 'duration_secs': 0.127862} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.674253] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647686', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'name': 'volume-ac75a73a-75e6-493d-9caf-c35a3091e391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c876b288-de2a-4195-bfef-88f38e219d9a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'serial': 'ac75a73a-75e6-493d-9caf-c35a3091e391'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1110.674747] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.674747] env[68674]: value = "task-3240861" [ 1110.674747] env[68674]: _type = "Task" [ 1110.674747] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.674942] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83c6ab68-b6d1-4f38-bb76-b0b6db239fe8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.683748] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240861, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.687331] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1110.687331] env[68674]: value = "task-3240862" [ 1110.687331] env[68674]: _type = "Task" [ 1110.687331] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.695060] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240860, 'name': Rename_Task, 'duration_secs': 0.166058} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.697713] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.697955] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240862, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.698165] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-072105c3-8b2b-41b6-9ede-bf101c1fcfdb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.716559] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1110.716559] env[68674]: value = "task-3240863" [ 1110.716559] env[68674]: _type = "Task" [ 1110.716559] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.724597] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240863, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.791919] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.795416] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.632s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.795848] env[68674]: DEBUG nova.objects.instance [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'resources' on Instance uuid bd3ae195-6e01-49d5-9fcf-9520273d9108 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.884361] env[68674]: DEBUG nova.compute.manager [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Received event network-changed-bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.884611] env[68674]: DEBUG nova.compute.manager [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Refreshing instance network info cache due to event network-changed-bc3acca0-ced5-49ff-9839-c40f7e6bc6f9. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1110.884892] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] Acquiring lock "refresh_cache-0de73fea-8e2d-47ff-a87e-a83708f9b4ad" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.885073] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] Acquired lock "refresh_cache-0de73fea-8e2d-47ff-a87e-a83708f9b4ad" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.885245] env[68674]: DEBUG nova.network.neutron [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Refreshing network info cache for port bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1110.953223] env[68674]: INFO nova.scheduler.client.report [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocations for instance 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d [ 1110.955555] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1110.985276] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1110.985540] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.985696] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1110.985904] env[68674]: DEBUG nova.virt.hardware [None 
req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.986090] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1110.986244] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1110.986479] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1110.986667] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1110.986856] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1110.987069] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1110.987337] env[68674]: DEBUG nova.virt.hardware [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1110.988244] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6081e8a8-b6e8-4cd3-862d-4e64bac007b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.996494] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d139cb9e-cf37-45d6-9d04-9e3353b4cb7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.188105] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240861, 'name': CreateVM_Task, 'duration_secs': 0.357565} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.191339] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1111.192174] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.192278] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.192541] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1111.193166] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a0711b9-14bc-4eba-bd2e-5c3b7e2da3fe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.197894] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240862, 'name': Rename_Task, 'duration_secs': 0.165093} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.199110] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.199517] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1111.199517] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ca00d1-8ee8-9f5b-d7c4-3bf593cac636" [ 1111.199517] env[68674]: _type = "Task" [ 1111.199517] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.199680] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8c99437-5573-46f8-acd5-37b059da99b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.209108] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ca00d1-8ee8-9f5b-d7c4-3bf593cac636, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.210375] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1111.210375] env[68674]: value = "task-3240864" [ 1111.210375] env[68674]: _type = "Task" [ 1111.210375] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.217066] env[68674]: DEBUG nova.compute.manager [req-e9f9a6a0-da5f-4a72-b34f-923cb24b9d93 req-113945e3-a364-46ae-b26c-a51bd6f88737 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-vif-plugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.217298] env[68674]: DEBUG oslo_concurrency.lockutils [req-e9f9a6a0-da5f-4a72-b34f-923cb24b9d93 req-113945e3-a364-46ae-b26c-a51bd6f88737 service nova] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.217503] env[68674]: DEBUG oslo_concurrency.lockutils [req-e9f9a6a0-da5f-4a72-b34f-923cb24b9d93 req-113945e3-a364-46ae-b26c-a51bd6f88737 service nova] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.217666] env[68674]: DEBUG oslo_concurrency.lockutils [req-e9f9a6a0-da5f-4a72-b34f-923cb24b9d93 req-113945e3-a364-46ae-b26c-a51bd6f88737 service nova] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.217825] env[68674]: DEBUG nova.compute.manager [req-e9f9a6a0-da5f-4a72-b34f-923cb24b9d93 req-113945e3-a364-46ae-b26c-a51bd6f88737 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] No waiting events found dispatching network-vif-plugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1111.218017] env[68674]: WARNING nova.compute.manager [req-e9f9a6a0-da5f-4a72-b34f-923cb24b9d93 req-113945e3-a364-46ae-b26c-a51bd6f88737 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received unexpected event network-vif-plugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b for instance with vm_state building and task_state spawning. 
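The entries above show the oslo.vmware task-polling loop at work: each vCenter operation (ReconfigVM_Task, Rename_Task, CreateVM_Task, PowerOnVM_Task) returns a task reference, and Nova waits on it while logging intermediate progress percentages until the task reports success. A minimal sketch of that polling pattern, assuming a hypothetical get_task_info(task_id) callable rather than the real oslo.vmware session API, could look like this:

    # Minimal sketch, not the oslo.vmware implementation: poll a vCenter task
    # until it finishes, logging progress, as the wait_for_task/_poll_task
    # entries above do.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real interval is configurable

    def wait_for_task(get_task_info, task_id):
        """get_task_info(task_id) is a hypothetical callable returning a dict
        such as {'state': 'running', 'progress': 14} or {'state': 'success'}."""
        while True:
            info = get_task_info(task_id)
            if info["state"] == "success":
                return info  # logged above as "... completed successfully."
            if info["state"] == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(POLL_INTERVAL)

The "progress is N%" and "completed successfully" lines in the log correspond to the two branches of this loop.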
[ 1111.221289] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.230358] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240863, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.309223] env[68674]: DEBUG nova.network.neutron [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Successfully updated port: 0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.467254] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d1d6487-6949-4910-92f8-a1b0ce9b945a tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.409s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.497473] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282b744d-ee95-4a1b-b560-323088718d93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.505731] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db54169-69ae-437e-9c6e-0e85678c7f3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.549859] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24c6577-581f-41b6-a638-46c897307f84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.558138] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec55440-e4f8-470e-b9c7-7e18cf32c678 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.574322] env[68674]: DEBUG nova.compute.provider_tree [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.657029] env[68674]: DEBUG nova.network.neutron [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Updated VIF entry in instance network info cache for port bc3acca0-ced5-49ff-9839-c40f7e6bc6f9. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1111.657029] env[68674]: DEBUG nova.network.neutron [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Updating instance_info_cache with network_info: [{"id": "bc3acca0-ced5-49ff-9839-c40f7e6bc6f9", "address": "fa:16:3e:45:7a:70", "network": {"id": "c0c4733f-8d0b-4cee-883f-2ad57ed16158", "bridge": "br-int", "label": "tempest-ServersTestJSON-40114649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cee54e456084086866d08b098a24b64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc3acca0-ce", "ovs_interfaceid": "bc3acca0-ced5-49ff-9839-c40f7e6bc6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.713441] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52ca00d1-8ee8-9f5b-d7c4-3bf593cac636, 'name': SearchDatastore_Task, 'duration_secs': 0.015224} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.716846] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.717149] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1111.717405] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.717680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.717734] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.718036] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-525f7fa0-aa93-47a2-ac4f-089492bd1e8c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.728060] env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240864, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.728991] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.729165] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1111.729831] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb1def97-06f3-4d6d-b05d-1601c06753ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.734909] env[68674]: DEBUG oslo_vmware.api [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240863, 'name': PowerOnVM_Task, 'duration_secs': 0.638364} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.736188] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.736188] env[68674]: INFO nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Took 9.59 seconds to spawn the instance on the hypervisor. [ 1111.736188] env[68674]: DEBUG nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1111.736864] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645660e4-c23c-43a1-a051-60aeb660e8ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.741582] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1111.741582] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522b1561-67aa-fdab-9287-4466ad9e6de5" [ 1111.741582] env[68674]: _type = "Task" [ 1111.741582] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.755023] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522b1561-67aa-fdab-9287-4466ad9e6de5, 'name': SearchDatastore_Task, 'duration_secs': 0.014166} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.755880] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf5331fd-1602-4a54-80cf-9ea79eea8ba0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.761203] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1111.761203] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523047ca-d501-36eb-071c-b1151cba080d" [ 1111.761203] env[68674]: _type = "Task" [ 1111.761203] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.770428] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523047ca-d501-36eb-071c-b1151cba080d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.814636] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.814636] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.814636] env[68674]: DEBUG nova.network.neutron [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.078984] env[68674]: DEBUG nova.scheduler.client.report [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.160059] env[68674]: DEBUG oslo_concurrency.lockutils [req-2ef5acf8-75c4-4d61-9d79-306f50a009a7 req-5613add5-c498-4aa9-98b5-69ad4bcf671c service nova] Releasing lock "refresh_cache-0de73fea-8e2d-47ff-a87e-a83708f9b4ad" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.226270] 
env[68674]: DEBUG oslo_vmware.api [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240864, 'name': PowerOnVM_Task, 'duration_secs': 0.592369} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.226660] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.226760] env[68674]: INFO nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Took 4.12 seconds to spawn the instance on the hypervisor. [ 1112.226961] env[68674]: DEBUG nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.227764] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a14cc8-386a-43ac-84df-72cd78426551 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.256258] env[68674]: INFO nova.compute.manager [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Took 16.69 seconds to build instance. [ 1112.271614] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523047ca-d501-36eb-071c-b1151cba080d, 'name': SearchDatastore_Task, 'duration_secs': 0.023435} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.272339] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.272339] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0de73fea-8e2d-47ff-a87e-a83708f9b4ad/0de73fea-8e2d-47ff-a87e-a83708f9b4ad.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1112.273114] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-479b1438-dbf8-46b4-9419-8edde17948ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.280195] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1112.280195] env[68674]: value = "task-3240865" [ 1112.280195] env[68674]: _type = "Task" [ 1112.280195] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.288012] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240865, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.344241] env[68674]: DEBUG nova.network.neutron [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.468095] env[68674]: DEBUG nova.network.neutron [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.583823] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.586449] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.717s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.586649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.586816] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1112.587753] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fecc9b3-d830-4e75-a8f2-2723c4cd65fc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.596107] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3fdbc45c-9413-4bad-a119-809b17a72998 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.611706] env[68674]: INFO nova.scheduler.client.report [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted allocations for instance bd3ae195-6e01-49d5-9fcf-9520273d9108 [ 1112.613332] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5090a02-f391-4360-8dcb-65b517ac0e72 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.622661] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e144d65-cd99-402d-abf6-daec8f021f12 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.654090] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179206MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1112.654090] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.654387] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.751042] env[68674]: INFO nova.compute.manager [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Took 13.17 seconds to build instance. [ 1112.759312] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b92328d8-ed75-4a9e-99c6-3253a93ff9d9 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.195s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.791668] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240865, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.809521] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.809802] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.973023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.973023] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance network_info: |[{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1112.973023] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:66:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f444395-3a03-4d13-9c2e-20a2965fcb9b', 'vif_model': 'vmxnet3'}] {{(pid=68674) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1112.979759] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.980384] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1112.980668] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95c94c15-1692-4ec8-842d-e0ae89b3b43c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.001420] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.001420] env[68674]: value = "task-3240866" [ 1113.001420] env[68674]: _type = "Task" [ 1113.001420] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.012912] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240866, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.122438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-c91c1efc-3855-475d-b5eb-59da7ac18efc tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "bd3ae195-6e01-49d5-9fcf-9520273d9108" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.063s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.249907] env[68674]: DEBUG nova.compute.manager [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.250178] env[68674]: DEBUG nova.compute.manager [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing instance network info cache due to event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.250296] env[68674]: DEBUG oslo_concurrency.lockutils [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.250446] env[68674]: DEBUG oslo_concurrency.lockutils [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.250611] env[68674]: DEBUG nova.network.neutron [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.252683] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fdb8023c-c742-4169-8414-a1b6f91e1319 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.682s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.290735] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683338} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.291014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 0de73fea-8e2d-47ff-a87e-a83708f9b4ad/0de73fea-8e2d-47ff-a87e-a83708f9b4ad.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.291245] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.291565] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfa54290-3e9d-4693-b3c6-b2cbcb653d1a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.298286] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1113.298286] env[68674]: value = "task-3240867" [ 1113.298286] env[68674]: _type = "Task" [ 1113.298286] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.307840] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240867, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.314825] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1113.452422] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.452696] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.452914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.453110] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.453281] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.455278] env[68674]: INFO nova.compute.manager [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Terminating instance [ 1113.511420] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd 
tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "d77d24ac-b44d-4014-83eb-f486db74ab0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.511701] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.511914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "d77d24ac-b44d-4014-83eb-f486db74ab0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.512121] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.512350] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.513955] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240866, 'name': CreateVM_Task, 'duration_secs': 0.419648} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.514533] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.515166] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.515443] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.515659] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1113.516234] env[68674]: INFO nova.compute.manager [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Terminating instance [ 1113.517578] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8387b127-9d65-47e3-929d-54d00ceae04d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.523850] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1113.523850] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e1b553-e5b8-a11d-d727-7ef34f9b2705" [ 1113.523850] env[68674]: _type = "Task" [ 1113.523850] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.532454] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e1b553-e5b8-a11d-d727-7ef34f9b2705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.687341] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 23891bad-1b63-4237-9243-78954cf67d52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.687560] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.687727] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.687884] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688048] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688201] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance ba4bfbb4-a89b-4ab6-964e-792647fd5a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688347] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance dbbf1313-6e44-45e2-8bf6-83409f06cb4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688492] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 9b8aad00-0980-4752-954a-c09c9ae6f9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688635] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5384c82b-a584-430f-8ef1-e2731562b5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688780] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance d77d24ac-b44d-4014-83eb-f486db74ab0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.688924] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance c876b288-de2a-4195-bfef-88f38e219d9a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.689168] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0de73fea-8e2d-47ff-a87e-a83708f9b4ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.689344] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 8f183286-f908-4d05-9a61-d6b1bf10dfb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.807816] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096728} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.808274] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.809200] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c43ffe-4657-4cda-b6b9-ff5db3559b2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.842037] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 0de73fea-8e2d-47ff-a87e-a83708f9b4ad/0de73fea-8e2d-47ff-a87e-a83708f9b4ad.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.847927] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30f0bd95-4ecf-4955-b775-940a5ecc17de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.872402] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1113.872402] env[68674]: value = "task-3240868" [ 1113.872402] env[68674]: _type = "Task" [ 1113.872402] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.880622] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.886213] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.959547] env[68674]: DEBUG nova.compute.manager [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1113.959834] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1113.961215] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a129a8-393e-4ec9-8890-b9d340823495 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.973166] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1113.973445] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8f3c319-b64c-4a08-aeb1-fe751580f2dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.982192] env[68674]: DEBUG oslo_vmware.api [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1113.982192] env[68674]: value = "task-3240869" [ 1113.982192] env[68674]: _type = "Task" [ 1113.982192] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.992392] env[68674]: DEBUG oslo_vmware.api [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240869, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.022825] env[68674]: DEBUG nova.compute.manager [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.023104] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.024046] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27de27a5-cd48-4f50-a995-ca11a9af249c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.036413] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e1b553-e5b8-a11d-d727-7ef34f9b2705, 'name': SearchDatastore_Task, 'duration_secs': 0.011998} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.038701] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.040158] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.040158] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.040158] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.040158] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.040158] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.040869] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca081781-88cc-4809-9e50-6a802877820b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.043059] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb67864d-75a8-497e-a372-03675d4f9832 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.053196] env[68674]: DEBUG oslo_vmware.api [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1114.053196] env[68674]: value = "task-3240870" [ 1114.053196] env[68674]: _type = "Task" [ 1114.053196] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.056467] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.056716] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.057827] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f00b471-c5a1-437f-99b6-48afdb43cb1c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.063918] env[68674]: DEBUG oslo_vmware.api [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240870, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.067873] env[68674]: DEBUG nova.compute.manager [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Received event network-changed-3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1114.069075] env[68674]: DEBUG nova.compute.manager [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Refreshing instance network info cache due to event network-changed-3660c8d4-d8be-4132-b92b-f96aa37b627b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1114.069075] env[68674]: DEBUG oslo_concurrency.lockutils [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] Acquiring lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.069075] env[68674]: DEBUG oslo_concurrency.lockutils [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] Acquired lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.069075] env[68674]: DEBUG nova.network.neutron [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Refreshing network info cache for port 3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1114.071813] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1114.071813] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522fc892-8e10-8c67-b9ea-dadde75b5fbe" [ 1114.071813] env[68674]: _type = "Task" [ 1114.071813] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.084568] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]522fc892-8e10-8c67-b9ea-dadde75b5fbe, 'name': SearchDatastore_Task, 'duration_secs': 0.010923} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.085613] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-584823c3-9054-4849-bd18-5aa22f118077 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.092137] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1114.092137] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528c0052-88f8-0726-2402-feefa89871d3" [ 1114.092137] env[68674]: _type = "Task" [ 1114.092137] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.105099] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528c0052-88f8-0726-2402-feefa89871d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.192677] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1114.193011] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1114.193011] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1114.339019] env[68674]: DEBUG nova.network.neutron [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updated VIF entry in instance network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.339019] env[68674]: DEBUG nova.network.neutron [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.385508] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240868, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.407894] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f43bec6-4226-4dae-8c1d-25658f97ae03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.415721] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77342d59-30bf-48a2-9a5e-40b490e9dd82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.449051] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4beb11-dd76-40d6-9af1-9448d41e1419 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.456724] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636d9e15-51fb-44cc-a6e3-ccd681980f98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.469576] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.493644] env[68674]: DEBUG oslo_vmware.api [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240869, 'name': PowerOffVM_Task, 'duration_secs': 0.409661} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.494139] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.494139] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.494337] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a940e6a-4800-4988-8b73-f112d7ff25ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.562156] env[68674]: DEBUG oslo_vmware.api [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240870, 'name': PowerOffVM_Task, 'duration_secs': 0.346235} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.562465] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.562637] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.562906] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25336a7b-d5e1-4308-8c0a-03b5738dbe93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.566460] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.566664] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.566842] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleting the datastore file [datastore2] 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.567099] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4ce61ec-750f-4e69-889b-f2b3d2bacf23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.574721] env[68674]: DEBUG oslo_vmware.api [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for the task: (returnval){ [ 1114.574721] env[68674]: value = "task-3240873" [ 1114.574721] env[68674]: _type = "Task" [ 1114.574721] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.583688] env[68674]: DEBUG oslo_vmware.api [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240873, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.602874] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528c0052-88f8-0726-2402-feefa89871d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011418} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.602874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.603180] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1114.603471] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-344076e5-ad67-4789-87a5-a3f694998afc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.609339] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1114.609339] env[68674]: value = "task-3240874" [ 1114.609339] env[68674]: _type = "Task" [ 1114.609339] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.619218] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240874, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.632833] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.632833] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.632959] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleting the datastore file [datastore2] d77d24ac-b44d-4014-83eb-f486db74ab0b {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.633208] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d6b5d9d-baf4-4a25-82b6-62f936ad600a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.639222] env[68674]: DEBUG oslo_vmware.api [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1114.639222] env[68674]: value = "task-3240875" [ 1114.639222] env[68674]: _type = "Task" [ 1114.639222] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.647688] env[68674]: DEBUG oslo_vmware.api [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240875, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.814948] env[68674]: DEBUG nova.network.neutron [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updated VIF entry in instance network info cache for port 3660c8d4-d8be-4132-b92b-f96aa37b627b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.815459] env[68674]: DEBUG nova.network.neutron [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updating instance_info_cache with network_info: [{"id": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "address": "fa:16:3e:60:3b:77", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3660c8d4-d8", "ovs_interfaceid": "3660c8d4-d8be-4132-b92b-f96aa37b627b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.843748] env[68674]: DEBUG oslo_concurrency.lockutils [req-4968a91e-0680-42dc-bcac-162165216b78 req-cd3521e3-a8b9-426c-8f41-83aabe7120d4 service nova] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.883684] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240868, 'name': ReconfigVM_Task, 'duration_secs': 0.816342} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.883861] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 0de73fea-8e2d-47ff-a87e-a83708f9b4ad/0de73fea-8e2d-47ff-a87e-a83708f9b4ad.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.884528] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d485b34b-2783-4814-aca6-5d9e35d36a86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.892347] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1114.892347] env[68674]: value = "task-3240876" [ 1114.892347] env[68674]: _type = "Task" [ 1114.892347] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.903438] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240876, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.973936] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.087870] env[68674]: DEBUG oslo_vmware.api [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Task: {'id': task-3240873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192249} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.088266] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.088332] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.088530] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.090421] env[68674]: INFO nova.compute.manager [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1115.090421] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.090421] env[68674]: DEBUG nova.compute.manager [-] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1115.090421] env[68674]: DEBUG nova.network.neutron [-] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.127614] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240874, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.151028] env[68674]: DEBUG oslo_vmware.api [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254608} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.151028] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.151028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.151028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.151028] env[68674]: INFO nova.compute.manager [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1115.151348] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.151402] env[68674]: DEBUG nova.compute.manager [-] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1115.151494] env[68674]: DEBUG nova.network.neutron [-] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.319995] env[68674]: DEBUG oslo_concurrency.lockutils [req-0a82fd76-af47-4685-bc18-8810cdccaf32 req-a423edf7-85b9-4c4c-ae54-ab2e119ed36a service nova] Releasing lock "refresh_cache-23891bad-1b63-4237-9243-78954cf67d52" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.405941] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240876, 'name': Rename_Task, 'duration_secs': 0.265625} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.405941] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.405941] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-152ad973-771a-46b7-9f0a-8dabded90f6f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.413230] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1115.413230] env[68674]: value = "task-3240877" [ 1115.413230] env[68674]: _type = "Task" [ 1115.413230] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.420160] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240877, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.483462] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1115.483462] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.827s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.483462] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.595s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.483462] env[68674]: INFO nova.compute.claims [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.486881] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.487265] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Cleaning up deleted instances {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1115.624051] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240874, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.634095] env[68674]: DEBUG nova.compute.manager [req-95e41feb-b7a6-4274-b667-abfbd8805c0c req-9e2f2221-7dd4-47a2-8a4e-ea55cb74d4dc service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Received event network-vif-deleted-270836ed-f229-45ed-b23b-58f26fa997be {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.634656] env[68674]: INFO nova.compute.manager [req-95e41feb-b7a6-4274-b667-abfbd8805c0c req-9e2f2221-7dd4-47a2-8a4e-ea55cb74d4dc service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Neutron deleted interface 270836ed-f229-45ed-b23b-58f26fa997be; detaching it from the instance and deleting it from the info cache [ 1115.634656] env[68674]: DEBUG nova.network.neutron [req-95e41feb-b7a6-4274-b667-abfbd8805c0c req-9e2f2221-7dd4-47a2-8a4e-ea55cb74d4dc service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.767818] env[68674]: DEBUG nova.compute.manager [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1115.864030] env[68674]: DEBUG nova.network.neutron [-] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.920676] env[68674]: DEBUG oslo_vmware.api [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240877, 'name': PowerOnVM_Task, 'duration_secs': 0.47717} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.920900] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.921114] env[68674]: INFO nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Took 7.24 seconds to spawn the instance on the hypervisor. 
[ 1115.921306] env[68674]: DEBUG nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.922087] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a67cc15-82a9-4618-9d64-104d24fdc0d7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.004198] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] There are 57 instances to clean {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1116.004391] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: caed484b-6fb0-41f2-a35f-8f85117dcf15] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1116.098775] env[68674]: DEBUG nova.compute.manager [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Received event network-changed-15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1116.099047] env[68674]: DEBUG nova.compute.manager [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Refreshing instance network info cache due to event network-changed-15b18361-a1c9-4dab-bcaf-6a40837d6bbe. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1116.099312] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] Acquiring lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.099492] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] Acquired lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.099695] env[68674]: DEBUG nova.network.neutron [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Refreshing network info cache for port 15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1116.122633] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240874, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.137071] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c5057e8-9b99-49f4-b4e9-9702ff46561d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.146617] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd59b3c8-2523-4211-b896-5cc5aefa4572 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.179899] env[68674]: DEBUG nova.compute.manager [req-95e41feb-b7a6-4274-b667-abfbd8805c0c req-9e2f2221-7dd4-47a2-8a4e-ea55cb74d4dc service nova] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Detach interface failed, port_id=270836ed-f229-45ed-b23b-58f26fa997be, reason: Instance 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1116.208143] env[68674]: DEBUG nova.network.neutron [-] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.286875] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.363313] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.363560] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.363738] env[68674]: INFO nova.compute.manager [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Shelving [ 1116.365972] env[68674]: INFO nova.compute.manager [-] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Took 1.28 seconds to deallocate network for instance. [ 1116.438799] env[68674]: INFO nova.compute.manager [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Took 16.33 seconds to build instance.
[ 1116.509022] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 95d79ce2-0018-4b8d-8a84-47e2d7f3ee6d] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1116.624870] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240874, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.517305} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.625294] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1116.625408] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1116.625703] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a8a5be3-e802-4eb4-a3c6-b9e8e71f3687 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.635194] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1116.635194] env[68674]: value = "task-3240878" [ 1116.635194] env[68674]: _type = "Task" [ 1116.635194] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.649277] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.708372] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418a23c6-489f-4785-90bd-5cb3a988e114 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.713632] env[68674]: INFO nova.compute.manager [-] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Took 1.56 seconds to deallocate network for instance. 
[ 1116.722039] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a83c03-34a6-49fa-9a31-b41357836930 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.756588] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702a55a0-f58a-4c58-bfed-db7baffc66cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.764949] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9d5e57-4910-49ce-b9b9-c844283d1c4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.780165] env[68674]: DEBUG nova.compute.provider_tree [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.874127] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.877086] env[68674]: DEBUG nova.network.neutron [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updated VIF entry in instance network info cache for port 15b18361-a1c9-4dab-bcaf-6a40837d6bbe. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.877466] env[68674]: DEBUG nova.network.neutron [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.940627] env[68674]: DEBUG oslo_concurrency.lockutils [None req-66ffd798-d364-4c33-b032-b1ca7da0f497 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.842s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.011720] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 4214e971-ca72-4c9f-a355-78e5ad8d8219] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1117.013807] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.014059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.014236] env[68674]: DEBUG nova.compute.manager [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Checking state 
{{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1117.015354] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a5f19b-d8a5-4716-b238-3e4fde8e1d00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.023078] env[68674]: DEBUG nova.compute.manager [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1117.023997] env[68674]: DEBUG nova.objects.instance [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'flavor' on Instance uuid 0de73fea-8e2d-47ff-a87e-a83708f9b4ad {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.146679] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065496} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.146917] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1117.147688] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071cbe1c-443c-474e-9262-3fbce5b63431 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.169818] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.170336] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c3132b6-910c-4587-a51f-4015afe0dd56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.190281] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1117.190281] env[68674]: value = "task-3240879" [ 1117.190281] env[68674]: _type = "Task" [ 1117.190281] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.198057] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.220419] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.283937] env[68674]: DEBUG nova.scheduler.client.report [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.377258] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1117.377558] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b8133d4-3cda-48d9-a046-325d049b1c15 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.380012] env[68674]: DEBUG oslo_concurrency.lockutils [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] Releasing lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.380261] env[68674]: DEBUG nova.compute.manager [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Received event network-vif-deleted-b383a330-22c5-4c9b-8d66-f4916b6f0977 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.380443] env[68674]: INFO nova.compute.manager [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Neutron deleted interface b383a330-22c5-4c9b-8d66-f4916b6f0977; detaching it from the instance and deleting it from the info cache [ 1117.380616] env[68674]: DEBUG nova.network.neutron [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Updating instance_info_cache with network_info: [] {{(pid=68674) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.387199] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1117.387199] env[68674]: value = "task-3240880" [ 1117.387199] env[68674]: _type = "Task" [ 1117.387199] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.396014] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240880, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.515536] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: cbccde73-b903-47f7-9cbc-f0b376a03435] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1117.700717] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240879, 'name': ReconfigVM_Task, 'duration_secs': 0.27446} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.701026] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.701648] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3c4e958-6b9b-4857-9701-8065fbe3f532 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.707570] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1117.707570] env[68674]: value = "task-3240881" [ 1117.707570] env[68674]: _type = "Task" [ 1117.707570] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.714682] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240881, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.788985] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.308s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.789608] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1117.792206] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.505s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.884024] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f5852ab-1188-45d7-bc35-6f333a33a97a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.895288] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e7b10a-efe3-483d-aef8-13ae42c77a13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.909272] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240880, 'name': PowerOffVM_Task, 'duration_secs': 0.210447} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.909817] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1117.910549] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0bbc0b-0ad5-4338-bc6f-d8bd53ac4033 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.937772] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736286ae-2aa8-4dc2-8daa-c1411da5b7ca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.940245] env[68674]: DEBUG nova.compute.manager [req-b2e5296b-35a2-4fc2-901a-feec1f15cacb req-c72bdf06-989d-4fd7-ba13-5bc6356f5dae service nova] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Detach interface failed, port_id=b383a330-22c5-4c9b-8d66-f4916b6f0977, reason: Instance d77d24ac-b44d-4014-83eb-f486db74ab0b could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1118.019738] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: bd3ae195-6e01-49d5-9fcf-9520273d9108] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1118.030348] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1118.030628] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e492a54d-9399-48de-9489-97199ca181ea {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.039167] env[68674]: DEBUG oslo_vmware.api [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1118.039167] env[68674]: value = "task-3240882" [ 1118.039167] env[68674]: _type = "Task" [ 1118.039167] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.049242] env[68674]: DEBUG oslo_vmware.api [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.217909] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240881, 'name': Rename_Task, 'duration_secs': 0.147757} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.217909] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.218146] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-374c61a7-3401-4b3e-be3b-86b8246749d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.223458] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1118.223458] env[68674]: value = "task-3240883" [ 1118.223458] env[68674]: _type = "Task" [ 1118.223458] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.230694] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240883, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.295579] env[68674]: DEBUG nova.compute.utils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1118.297086] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1118.297591] env[68674]: DEBUG nova.network.neutron [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1118.301090] env[68674]: INFO nova.compute.claims [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.338710] env[68674]: DEBUG nova.policy [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28e9b76e01f463bbb375cbd9c51684f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81afe76c94de4e94b53f15af0ef95e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1118.448495] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1118.448900] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9907063e-a10d-47b3-a8d4-cf22639900f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.458201] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1118.458201] env[68674]: value = "task-3240884" [ 1118.458201] env[68674]: _type = "Task" [ 1118.458201] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.467794] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240884, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.522616] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 6dc530e4-fb03-45dc-8d70-9f0e8731dfdc] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1118.549475] env[68674]: DEBUG oslo_vmware.api [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240882, 'name': PowerOffVM_Task, 'duration_secs': 0.182137} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.549762] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1118.550377] env[68674]: DEBUG nova.compute.manager [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.550768] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cd97c9-db5d-403d-b984-8e42100670a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.590349] env[68674]: DEBUG nova.network.neutron [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Successfully created port: 8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1118.735538] env[68674]: DEBUG oslo_vmware.api [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240883, 'name': PowerOnVM_Task, 'duration_secs': 0.478347} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.735854] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.736105] env[68674]: INFO nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Took 7.78 seconds to spawn the instance on the hypervisor. 
[ 1118.736316] env[68674]: DEBUG nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.737080] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f9c0b6-04df-43b3-b74b-d32b67e24a6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.805636] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1118.810822] env[68674]: INFO nova.compute.resource_tracker [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating resource usage from migration 6d5e6e1f-457d-47c7-90f2-f0eac17a7e64 [ 1118.972720] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240884, 'name': CreateSnapshot_Task, 'duration_secs': 0.419912} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.973148] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1118.974140] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34beec31-1c0a-4a50-9674-8ad30e5a4e3c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.025627] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 8740a794-a772-4260-aeb1-51762a586fe2] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1119.064753] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f813bfad-fdeb-4f17-a1bb-b739b558bdc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.067921] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a1d7f9be-72fb-40af-a522-a9f16f8ce964 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.054s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.074308] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d2a7a1-ffc1-4704-86cb-f936ee456a39 
{{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.114329] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c057d3e-ebf7-42ff-a750-2c43e05833e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.122961] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabf1b8f-ba41-4697-a0dc-a36baa202e60 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.863751] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1119.864138] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e9bebb3b-78ff-42b1-a350-efd1db5c6eaa] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1119.868506] env[68674]: DEBUG nova.compute.manager [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1119.868693] env[68674]: DEBUG nova.compute.manager [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing instance network info cache due to event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1119.868900] env[68674]: DEBUG oslo_concurrency.lockutils [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.869076] env[68674]: DEBUG oslo_concurrency.lockutils [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.869249] env[68674]: DEBUG nova.network.neutron [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1119.872456] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-87f7ee40-0978-4455-8060-dab7fdccbc4d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.876186] env[68674]: INFO nova.compute.manager [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Took 19.67 seconds to build instance. [ 1119.886296] env[68674]: DEBUG nova.compute.provider_tree [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.888868] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1119.888868] env[68674]: value = "task-3240885" [ 1119.888868] env[68674]: _type = "Task" [ 1119.888868] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.899236] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240885, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.170606] env[68674]: DEBUG nova.network.neutron [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Successfully updated port: 8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.375745] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1120.376217] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.376452] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.376649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.376830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.376994] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.378531] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 79ee95b6-7321-4e33-a0e4-2c8ed1bc1031] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1120.380311] env[68674]: INFO nova.compute.manager [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 
tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Terminating instance [ 1120.383504] env[68674]: DEBUG oslo_concurrency.lockutils [None req-adc80889-9a80-4b56-9720-447f92b6e158 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.187s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.390939] env[68674]: DEBUG nova.scheduler.client.report [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.406236] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240885, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.415673] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1120.415908] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.416081] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1120.416268] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 
tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.416416] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1120.416573] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1120.416787] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1120.417035] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1120.417247] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1120.417487] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1120.417661] env[68674]: DEBUG nova.virt.hardware [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1120.418894] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980e39fd-c6d9-4502-8d1a-b92f9e3bedaa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.427365] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690842d5-b979-4240-80f8-e3f477df0798 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.616061] env[68674]: DEBUG nova.network.neutron [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updated VIF entry in instance network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1120.616570] env[68674]: DEBUG nova.network.neutron [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.673649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.673795] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.673940] env[68674]: DEBUG nova.network.neutron [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1120.885879] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 182deaf0-c20a-4041-8f41-81786d6b053e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1120.888480] env[68674]: DEBUG nova.compute.manager [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1120.888871] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1120.890023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2f7c5a-eeff-4494-be15-383253f117c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.895992] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.104s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.896308] env[68674]: INFO nova.compute.manager [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Migrating [ 1120.904899] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.031s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.905158] env[68674]: DEBUG nova.objects.instance [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lazy-loading 'resources' on Instance uuid 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.909027] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1120.912100] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1d1c9ff-3f01-487a-b940-0f0d1253c869 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.924331] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240885, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.984544] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1120.984784] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1120.985043] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore1] 0de73fea-8e2d-47ff-a87e-a83708f9b4ad {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1120.985344] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3598cac9-3f1d-4d0a-9d7a-2316239e0ada {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.991612] env[68674]: DEBUG oslo_vmware.api [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1120.991612] env[68674]: value = "task-3240887" [ 1120.991612] env[68674]: _type = "Task" [ 1120.991612] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.000204] env[68674]: DEBUG oslo_vmware.api [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.119420] env[68674]: DEBUG oslo_concurrency.lockutils [req-4ed6c470-1699-418a-9bd4-4dd924101b91 req-9afd799b-0ace-4377-8acb-1457c72c287a service nova] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.208569] env[68674]: DEBUG nova.network.neutron [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1121.344849] env[68674]: DEBUG nova.network.neutron [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updating instance_info_cache with network_info: [{"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "address": "fa:16:3e:ba:6d:d0", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f09d5ee-2e", "ovs_interfaceid": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.390896] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: cab97ca7-968b-4d40-bb1f-2244469e1b56] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1121.417335] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240885, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.419916] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.420180] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.420415] env[68674]: DEBUG nova.network.neutron [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.504448] env[68674]: DEBUG oslo_vmware.api [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192548} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.504715] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1121.504898] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1121.505177] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1121.505351] env[68674]: INFO nova.compute.manager [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1121.505588] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1121.505777] env[68674]: DEBUG nova.compute.manager [-] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1121.505871] env[68674]: DEBUG nova.network.neutron [-] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1121.621061] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd2f724-1c46-45a0-8ac4-653daf77e332 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.630284] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee5d854-dec9-48ff-9523-21fc6f5b15ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.661920] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03799a0-d80e-4014-8dc4-0a7475994852 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.671920] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68387c5-8ca2-40d4-93fd-669c9ab0d01d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.684917] env[68674]: DEBUG nova.compute.provider_tree [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.700657] env[68674]: DEBUG nova.compute.manager [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Received event network-vif-plugged-8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.700879] env[68674]: DEBUG oslo_concurrency.lockutils [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.701095] env[68674]: DEBUG oslo_concurrency.lockutils [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.701262] env[68674]: DEBUG oslo_concurrency.lockutils [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.701454] env[68674]: DEBUG nova.compute.manager [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] No waiting events found dispatching network-vif-plugged-8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1121.701680] env[68674]: WARNING nova.compute.manager [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Received unexpected event network-vif-plugged-8f09d5ee-2ec2-4438-98cc-36bdc79d150b for instance with vm_state building and task_state spawning. [ 1121.701839] env[68674]: DEBUG nova.compute.manager [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Received event network-changed-8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.701996] env[68674]: DEBUG nova.compute.manager [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Refreshing instance network info cache due to event network-changed-8f09d5ee-2ec2-4438-98cc-36bdc79d150b. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1121.702183] env[68674]: DEBUG oslo_concurrency.lockutils [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] Acquiring lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.849055] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.849055] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Instance network_info: |[{"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "address": "fa:16:3e:ba:6d:d0", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f09d5ee-2e", "ovs_interfaceid": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1121.849055] env[68674]: DEBUG oslo_concurrency.lockutils [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] Acquired lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.849055] env[68674]: DEBUG nova.network.neutron [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Refreshing network info cache for port 8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.850401] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:6d:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f09d5ee-2ec2-4438-98cc-36bdc79d150b', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.857946] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1121.860798] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1121.861332] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9323ac6-01a9-47b7-873f-4742dc483f95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.883035] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.883035] env[68674]: value = "task-3240888" [ 1121.883035] env[68674]: _type = "Task" [ 1121.883035] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.891204] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240888, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.894054] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 8d810cc0-3f85-49c9-9d7d-8e1711a97015] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1121.914173] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240885, 'name': CloneVM_Task, 'duration_secs': 1.620593} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.914603] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Created linked-clone VM from snapshot [ 1121.915255] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1108ed8-8b19-4f02-bd02-0c607509cf27 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.925761] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Uploading image 5bc5480f-c581-4561-8a80-b3996f994c28 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1121.953992] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1121.953992] env[68674]: value = "vm-647695" [ 1121.953992] env[68674]: _type = "VirtualMachine" [ 1121.953992] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1121.954289] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7ef4e410-1daa-43cf-8be1-7f4a8f32fa82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.962115] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease: (returnval){ [ 1121.962115] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525815f9-1587-6ba3-e3a5-2e9930d9274e" [ 1121.962115] env[68674]: _type = "HttpNfcLease" [ 1121.962115] env[68674]: } obtained for exporting VM: (result){ [ 1121.962115] env[68674]: value = "vm-647695" [ 1121.962115] env[68674]: _type = "VirtualMachine" [ 1121.962115] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1121.962510] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the lease: (returnval){ [ 1121.962510] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525815f9-1587-6ba3-e3a5-2e9930d9274e" [ 1121.962510] env[68674]: _type = "HttpNfcLease" [ 1121.962510] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1121.971484] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1121.971484] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525815f9-1587-6ba3-e3a5-2e9930d9274e" [ 1121.971484] env[68674]: _type = "HttpNfcLease" [ 1121.971484] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1122.072865] env[68674]: DEBUG nova.network.neutron [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updated VIF entry in instance network info cache for port 8f09d5ee-2ec2-4438-98cc-36bdc79d150b. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1122.072968] env[68674]: DEBUG nova.network.neutron [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updating instance_info_cache with network_info: [{"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "address": "fa:16:3e:ba:6d:d0", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f09d5ee-2e", "ovs_interfaceid": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.129810] env[68674]: DEBUG nova.network.neutron [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.188499] env[68674]: DEBUG nova.scheduler.client.report [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.254411] env[68674]: DEBUG nova.network.neutron [-] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.394024] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240888, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.396662] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f6f5fb73-521a-4c83-93ea-a1eb2af2e142] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1122.471999] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1122.471999] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525815f9-1587-6ba3-e3a5-2e9930d9274e" [ 1122.471999] env[68674]: _type = "HttpNfcLease" [ 1122.471999] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1122.472315] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1122.472315] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525815f9-1587-6ba3-e3a5-2e9930d9274e" [ 1122.472315] env[68674]: _type = "HttpNfcLease" [ 1122.472315] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1122.473013] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a1c6cc-75ab-47f4-b533-f83b139a889a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.480108] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525af175-b1ef-431f-e3d0-502f26b946b0/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1122.480287] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525af175-b1ef-431f-e3d0-502f26b946b0/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1122.575960] env[68674]: DEBUG oslo_concurrency.lockutils [req-e0a44adb-5616-48c2-866f-622b9143263b req-3535b7a0-eba5-465e-9652-fe8eb2848357 service nova] Releasing lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.587433] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a43759f-2eb1-493f-b753-c789b62b4c98 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.632467] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.692685] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.694943] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.475s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.695239] env[68674]: DEBUG nova.objects.instance [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'resources' on Instance uuid d77d24ac-b44d-4014-83eb-f486db74ab0b {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.714446] env[68674]: INFO 
nova.scheduler.client.report [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Deleted allocations for instance 66f4ab32-ef66-4d1d-93b6-775d59ce3c41 [ 1122.759324] env[68674]: INFO nova.compute.manager [-] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Took 1.25 seconds to deallocate network for instance. [ 1122.894445] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240888, 'name': CreateVM_Task, 'duration_secs': 0.751681} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.894673] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1122.895370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.895564] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.896074] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1122.896403] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eb8f06b-0cb0-4afc-8806-12594abb8c3c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.899624] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 7a13c52a-328a-4baa-827f-4f2e9cd29269] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1122.902514] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1122.902514] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5236fe50-58b5-fba9-eabb-5da813eb596c" [ 1122.902514] env[68674]: _type = "Task" [ 1122.902514] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.911179] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5236fe50-58b5-fba9-eabb-5da813eb596c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.222786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7ff53d51-3b16-4513-b072-6c7457999c46 tempest-AttachInterfacesTestJSON-927775456 tempest-AttachInterfacesTestJSON-927775456-project-member] Lock "66f4ab32-ef66-4d1d-93b6-775d59ce3c41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.770s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.266982] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.409181] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: bda8e2f0-0d06-41f2-b4e8-d8ba7f9eca77] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1123.429017] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5236fe50-58b5-fba9-eabb-5da813eb596c, 'name': SearchDatastore_Task, 'duration_secs': 0.010334} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.429017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.429017] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.429349] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.429638] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.430058] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 
tempest-DeleteServersTestJSON-214076679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.430597] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-203dd7e4-2b28-45a9-912c-52aca32905be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.442249] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.443154] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.443932] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9931904b-2f4e-4805-a5d9-939e48b33496 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.456043] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1123.456043] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5b567-764e-3df0-5374-10d40710fd04" [ 1123.456043] env[68674]: _type = "Task" [ 1123.456043] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.466703] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5b567-764e-3df0-5374-10d40710fd04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.466703] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c1f3bb-c3dd-4701-904e-e936416ec43e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.475383] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302dc8c9-a4c6-4784-9802-f2f78d037a2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.508521] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d0d4c6-659b-4e18-9866-edf8cdeabc17 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.517264] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdf4b4e-f8e6-4916-95c2-1aea40c72664 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.533388] env[68674]: DEBUG nova.compute.provider_tree [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.846237] env[68674]: DEBUG nova.compute.manager [req-e2c8aa54-a4d5-4761-a4e9-7cffce6f50f7 req-2d03e7b9-35d7-45e7-9d64-d6da5eaea3ec service nova] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Received event network-vif-deleted-bc3acca0-ced5-49ff-9839-c40f7e6bc6f9 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1123.918870] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: fa8c58b7-a462-437f-b1ed-57fef6aa3903] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1123.970411] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a5b567-764e-3df0-5374-10d40710fd04, 'name': SearchDatastore_Task, 'duration_secs': 0.015516} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.971595] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ad1a5c-682c-4873-8762-3214aa01ab9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.980333] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1123.980333] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527f022d-bd42-73e5-4e34-f4d509420e46" [ 1123.980333] env[68674]: _type = "Task" [ 1123.980333] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.990028] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527f022d-bd42-73e5-4e34-f4d509420e46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.038302] env[68674]: DEBUG nova.scheduler.client.report [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.148593] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a907418-4042-4bb4-bcd0-075d26862a93 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.168503] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.422258] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2a7a6269-65a8-402c-b174-a4a46d20a33a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1124.452211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.452468] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.492012] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527f022d-bd42-73e5-4e34-f4d509420e46, 'name': SearchDatastore_Task, 'duration_secs': 0.014228} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.492526] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.492526] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 6cf18175-1436-4ba5-b4b3-8641ec6bdad1/6cf18175-1436-4ba5-b4b3-8641ec6bdad1.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1124.493356] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c63b644-256a-4e64-a4a6-c40e536b1451 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.502689] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1124.502689] env[68674]: value = "task-3240890" [ 1124.502689] env[68674]: _type = "Task" [ 1124.502689] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.511380] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.545594] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.549747] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.283s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.550168] env[68674]: DEBUG nova.objects.instance [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid 0de73fea-8e2d-47ff-a87e-a83708f9b4ad {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.583893] env[68674]: INFO nova.scheduler.client.report [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted allocations for instance d77d24ac-b44d-4014-83eb-f486db74ab0b [ 1124.676228] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.676559] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cdc6ee9-45d7-4ca2-b0ac-69b724acb5d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.683243] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1124.683243] env[68674]: value = "task-3240891" [ 1124.683243] env[68674]: _type = "Task" [ 1124.683243] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.692568] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240891, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.927163] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 691f9f14-4f53-46a4-8bf7-d027cfdd37e8] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1124.956146] env[68674]: INFO nova.compute.manager [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Detaching volume 45dbff42-8058-475f-b6ef-88e2d283c59a [ 1125.007792] env[68674]: INFO nova.virt.block_device [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Attempting to driver detach volume 45dbff42-8058-475f-b6ef-88e2d283c59a from mountpoint /dev/sdb [ 1125.008316] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Volume detach. Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1125.008530] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647679', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'name': 'volume-45dbff42-8058-475f-b6ef-88e2d283c59a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '142e8ede-90e2-47cf-a1b1-8c4fd59eed0a', 'attached_at': '', 'detached_at': '', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'serial': '45dbff42-8058-475f-b6ef-88e2d283c59a'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1125.012895] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fc3d51-126c-4bf9-9c09-9c78edc7eb70 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.025660] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240890, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.044710] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a127010a-fb0d-40d2-beea-bf78d0239007 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.056837] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd5534f-94bc-4d85-9895-204491070bd4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.083887] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60ed2d7-e643-4315-9cf4-cce06192a3ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.102874] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f4f7039f-0687-42e0-a21d-3143713adadd tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "d77d24ac-b44d-4014-83eb-f486db74ab0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.591s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.104269] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] The volume has not been displaced from its original location: [datastore2] volume-45dbff42-8058-475f-b6ef-88e2d283c59a/volume-45dbff42-8058-475f-b6ef-88e2d283c59a.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1125.110762] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1125.114374] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fda985b-9966-4b2f-bc29-dd7e5f6b75d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.136274] env[68674]: DEBUG oslo_vmware.api [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1125.136274] env[68674]: value = "task-3240892" [ 1125.136274] env[68674]: _type = "Task" [ 1125.136274] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.146583] env[68674]: DEBUG oslo_vmware.api [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240892, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.194339] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240891, 'name': PowerOffVM_Task, 'duration_secs': 0.213972} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.196825] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.197049] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1125.282757] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de876dd0-a4fb-41d4-a6a2-ca5888186f29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.290836] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc3a427-831e-4296-8954-7b3729968b2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.325198] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6152c54-4a62-4567-ac83-ef6e1b253dd2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.332574] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc43f15a-cc59-4fec-8698-50541a96c037 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.351111] env[68674]: DEBUG nova.compute.provider_tree [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.430879] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 6af32e52-f10e-47be-ab36-e130614ba9e8] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1125.513854] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550904} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.513854] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 6cf18175-1436-4ba5-b4b3-8641ec6bdad1/6cf18175-1436-4ba5-b4b3-8641ec6bdad1.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1125.514193] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.514982] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bc348a7-f1eb-4b4a-9f83-11526174f84b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.521833] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1125.521833] env[68674]: value = "task-3240893" [ 1125.521833] env[68674]: _type = "Task" [ 1125.521833] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.529879] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240893, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.649665] env[68674]: DEBUG oslo_vmware.api [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240892, 'name': ReconfigVM_Task, 'duration_secs': 0.391612} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.649981] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1125.655823] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5b9f5ba-444f-478b-ad7f-6848aa2b0477 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.673894] env[68674]: DEBUG oslo_vmware.api [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1125.673894] env[68674]: value = "task-3240894" [ 1125.673894] env[68674]: _type = "Task" [ 1125.673894] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.684221] env[68674]: DEBUG oslo_vmware.api [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240894, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.703960] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.703960] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.703960] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.703960] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.704416] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.704416] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.704565] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.704735] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.704906] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.705114] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.705339] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.710976] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b58ae7a9-b5ee-4906-9979-63b9b7d13b84 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.728118] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1125.728118] env[68674]: value = "task-3240895" [ 1125.728118] env[68674]: _type = "Task" [ 1125.728118] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.737034] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240895, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.854816] env[68674]: DEBUG nova.scheduler.client.report [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.934399] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: a33d5b86-32ba-4167-9fcc-4a82f3ad4cc6] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1126.030758] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098103} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.031146] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1126.031847] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b815d7-e750-4c4e-afcc-b48031bdda10 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.057119] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 6cf18175-1436-4ba5-b4b3-8641ec6bdad1/6cf18175-1436-4ba5-b4b3-8641ec6bdad1.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.057473] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-deeba2ae-ceb0-495d-b88a-c253c380fc0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.079103] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1126.079103] env[68674]: value = "task-3240896" [ 1126.079103] env[68674]: _type = "Task" [ 1126.079103] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.087635] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240896, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.185224] env[68674]: DEBUG oslo_vmware.api [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240894, 'name': ReconfigVM_Task, 'duration_secs': 0.185769} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.185557] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647679', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'name': 'volume-45dbff42-8058-475f-b6ef-88e2d283c59a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '142e8ede-90e2-47cf-a1b1-8c4fd59eed0a', 'attached_at': '', 'detached_at': '', 'volume_id': '45dbff42-8058-475f-b6ef-88e2d283c59a', 'serial': '45dbff42-8058-475f-b6ef-88e2d283c59a'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1126.238943] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240895, 'name': ReconfigVM_Task, 'duration_secs': 0.229734} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.239343] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1126.360327] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.388639] env[68674]: INFO nova.scheduler.client.report [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance 0de73fea-8e2d-47ff-a87e-a83708f9b4ad [ 1126.437604] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e894cd36-95c8-473b-9bbd-483f11fb5add] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1126.590322] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240896, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.731451] env[68674]: DEBUG nova.objects.instance [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.745946] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1126.746413] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.746413] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 
tempest-ServerActionsTestOtherA-986359527-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1126.746584] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.746745] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1126.746948] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1126.747239] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1126.747486] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1126.747837] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1126.747897] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1126.748125] env[68674]: DEBUG nova.virt.hardware [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1126.753894] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1126.755392] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef5dd5a7-03f6-428e-b421-18af78c9a67b {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.776989] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1126.776989] env[68674]: value = "task-3240897" [ 1126.776989] env[68674]: _type = "Task" [ 1126.776989] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.786165] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240897, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.898048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-51b6192a-15ca-4a66-9aaf-4b0a464bce17 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "0de73fea-8e2d-47ff-a87e-a83708f9b4ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.521s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.941168] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 082fd3a5-b30e-41cc-8fba-dab2802a1e3e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1127.091546] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240896, 'name': ReconfigVM_Task, 'duration_secs': 0.561304} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.091855] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 6cf18175-1436-4ba5-b4b3-8641ec6bdad1/6cf18175-1436-4ba5-b4b3-8641ec6bdad1.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.093759] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db67e542-3365-4ec1-99ba-702acbaa40f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.098921] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1127.098921] env[68674]: value = "task-3240898" [ 1127.098921] env[68674]: _type = "Task" [ 1127.098921] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.113015] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240898, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.287234] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240897, 'name': ReconfigVM_Task, 'duration_secs': 0.170638} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.287891] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1127.288703] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13893599-fecc-4033-b81b-3679c5f1821c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.311404] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-ac75a73a-75e6-493d-9caf-c35a3091e391/volume-ac75a73a-75e6-493d-9caf-c35a3091e391.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.311737] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8a0c43f-f68b-4655-aab1-c945c37cb903 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.330343] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1127.330343] env[68674]: value = "task-3240899" [ 1127.330343] env[68674]: _type = "Task" [ 1127.330343] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.339124] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240899, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.444663] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 33313b29-abaf-4ff7-9182-abfcfb9b3220] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1127.611761] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240898, 'name': Rename_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.667178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.742225] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0e1e6956-91ef-4b18-b349-f7f135f2ebae tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.290s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.745261] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.076s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.745261] env[68674]: DEBUG nova.compute.manager [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.745261] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0384392-f15c-4578-a6aa-823b0e926951 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.751684] env[68674]: DEBUG nova.compute.manager [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1127.752294] env[68674]: DEBUG nova.objects.instance [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.845023] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240899, 'name': ReconfigVM_Task, 'duration_secs': 0.290716} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.845023] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-ac75a73a-75e6-493d-9caf-c35a3091e391/volume-ac75a73a-75e6-493d-9caf-c35a3091e391.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.845261] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1127.947919] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f70145c9-4846-42e1-9c1c-de9759097abd] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1128.111463] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240898, 'name': Rename_Task, 'duration_secs': 0.704522} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.114289] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1128.115125] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e50e7dbf-561f-450c-8cd2-90e7efbf1e59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.121726] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1128.121726] env[68674]: value = "task-3240900" [ 1128.121726] env[68674]: _type = "Task" [ 1128.121726] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.134196] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240900, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.164711] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.164711] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.164711] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.164711] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.164711] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.165733] env[68674]: INFO nova.compute.manager [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Terminating instance [ 1128.354115] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f860b651-6035-42b8-8fff-814c14420f56 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.376613] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e66b8b9-269e-465c-8a84-b26c08d05d46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.398025] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} 
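Editor's note: the ReconfigVM_Task records above (detach disk 2000, re-attach the volume vmdk, then "Waiting for the task ... progress is N% ... completed successfully") follow oslo.vmware's create-task-then-poll pattern. The snippet below is a minimal sketch of that pattern only, not Nova's volumeops code; the endpoint, credentials, `vm_ref` and `config_spec` are placeholders.

```python
from oslo_vmware import api


def reconfigure_vm(session, vm_ref, config_spec):
    """Issue ReconfigVM_Task and poll it until vCenter reports completion,
    mirroring the 'Waiting for the task ... progress is N%' records above."""
    # invoke_api() returns a task reference; wait_for_task() polls it until
    # the task reaches a terminal state (success raises nothing, error raises).
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder vCenter endpoint and credentials; a real deployment reads
    # these from nova.conf and connecting here would require a live vCenter.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
```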
[ 1128.453049] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: ffdd1c62-1b4e-40cf-a27e-ff2877439701] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1128.635242] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240900, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.671236] env[68674]: DEBUG nova.compute.manager [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1128.671548] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1128.672860] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f776474-098a-4560-bdfd-84a3b5efc2a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.683125] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1128.683125] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98689639-2835-4a63-bd3d-1455939873a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.689991] env[68674]: DEBUG oslo_vmware.api [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1128.689991] env[68674]: value = "task-3240901" [ 1128.689991] env[68674]: _type = "Task" [ 1128.689991] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.700927] env[68674]: DEBUG oslo_vmware.api [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240901, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.759830] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1128.760282] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17fce805-038a-4eee-b1e1-5a7d36e69836 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.769523] env[68674]: DEBUG oslo_vmware.api [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1128.769523] env[68674]: value = "task-3240902" [ 1128.769523] env[68674]: _type = "Task" [ 1128.769523] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.780028] env[68674]: DEBUG oslo_vmware.api [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.960417] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e684ec31-b5d9-458c-bbba-36ada7f275bd] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1129.134887] env[68674]: DEBUG oslo_vmware.api [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240900, 'name': PowerOnVM_Task, 'duration_secs': 0.950089} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.135236] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1129.135450] env[68674]: INFO nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Took 8.76 seconds to spawn the instance on the hypervisor. 
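Editor's note: the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" records in this stretch are emitted by oslo.concurrency's lockutils; the ones tagged `inner lockutils.py:405/410/424` come from the decorator form and the ones tagged `lock lockutils.py:313/316/334` from the context-manager form. Nova wraps these helpers, so the sketch below only illustrates the primitive; the lock names and function bodies are illustrative.

```python
from oslo_concurrency import lockutils


# Decorator form: serializes callers on a named lock and produces the
# "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held"
# DEBUG records tagged inner/lockutils.py in the log above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker bookkeeping runs while the named lock is held


# Context-manager form: produces the records tagged lock/lockutils.py
# (e.g. the refresh_cache-<uuid> acquire/release pairs seen later).
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache under the lock
```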
[ 1129.135630] env[68674]: DEBUG nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1129.137168] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb144083-7f6d-4e07-80ce-ee18e91e1eb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.200931] env[68674]: DEBUG oslo_vmware.api [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240901, 'name': PowerOffVM_Task, 'duration_secs': 0.250763} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.201950] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.202153] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.202413] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65182fde-3973-4307-90cf-41c20da90439 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.287024] env[68674]: DEBUG oslo_vmware.api [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240902, 'name': PowerOffVM_Task, 'duration_secs': 0.389912} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.287024] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.287024] env[68674]: DEBUG nova.compute.manager [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1129.287024] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f26cd3-858a-403a-be1f-d1d689f44715 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.291325] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.291545] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.291725] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleting the datastore file [datastore2] e371ae6b-44fd-47ce-9c58-8981e7da5cbc {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.291970] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fef99472-e83a-4999-b36c-a1339d3c552f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.301039] env[68674]: DEBUG oslo_vmware.api [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for the task: (returnval){ [ 1129.301039] env[68674]: value = "task-3240904" [ 1129.301039] env[68674]: _type = "Task" [ 1129.301039] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.307702] env[68674]: DEBUG oslo_vmware.api [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240904, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.464278] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 245089a5-929d-49b0-aa36-749d342e8473] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1129.659288] env[68674]: INFO nova.compute.manager [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Took 15.83 seconds to build instance. [ 1129.800873] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7be6b6f1-1e61-499c-985b-151e08a0158d tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.814192] env[68674]: DEBUG oslo_vmware.api [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Task: {'id': task-3240904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148105} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.814623] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.814696] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1129.815807] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1129.815807] env[68674]: INFO nova.compute.manager [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1129.815807] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1129.815807] env[68674]: DEBUG nova.compute.manager [-] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1129.816121] env[68674]: DEBUG nova.network.neutron [-] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1129.968427] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2d02adff-9fbf-4889-99e4-4efde5a51b33] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1130.140868] env[68674]: DEBUG nova.network.neutron [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Port 15b18361-a1c9-4dab-bcaf-6a40837d6bbe binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1130.163418] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4f0ee00d-edaf-4d99-b9f6-ea9909e62ebb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.353s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.296569] env[68674]: DEBUG nova.compute.manager [req-a8374d9c-f199-4e61-98b6-0d6fb6a1f632 req-9dff929d-aa45-4a6c-9c38-f4da4466b80c service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Received event network-vif-deleted-7188e58a-318a-4695-b262-797d48d71c63 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1130.300025] env[68674]: INFO nova.compute.manager [req-a8374d9c-f199-4e61-98b6-0d6fb6a1f632 req-9dff929d-aa45-4a6c-9c38-f4da4466b80c service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Neutron deleted interface 7188e58a-318a-4695-b262-797d48d71c63; detaching it from the instance and deleting it from the info cache [ 1130.300025] env[68674]: DEBUG nova.network.neutron [req-a8374d9c-f199-4e61-98b6-0d6fb6a1f632 req-9dff929d-aa45-4a6c-9c38-f4da4466b80c service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.356773] env[68674]: DEBUG nova.objects.instance [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.427946] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.428213] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.474564] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0e7c5243-ad33-4391-8977-b9019643e3de] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1130.665549] env[68674]: DEBUG nova.network.neutron [-] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.806389] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be321c9d-d8e5-4594-9c78-3022079a6f8a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.813431] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc8f274-517e-45c3-9c77-528de27eb553 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.850794] env[68674]: DEBUG nova.compute.manager [req-a8374d9c-f199-4e61-98b6-0d6fb6a1f632 req-9dff929d-aa45-4a6c-9c38-f4da4466b80c service nova] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Detach interface failed, port_id=7188e58a-318a-4695-b262-797d48d71c63, reason: Instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1130.867139] env[68674]: DEBUG oslo_concurrency.lockutils [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.867139] env[68674]: DEBUG oslo_concurrency.lockutils [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.867139] env[68674]: DEBUG nova.network.neutron [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1130.867139] env[68674]: DEBUG nova.objects.instance [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'info_cache' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.933910] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1130.983653] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 1e4e6ddb-1fb0-4813-bc6f-28525d3ea5d3] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1131.114484] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.114484] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.170033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.170033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.170033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.172087] env[68674]: INFO nova.compute.manager [-] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Took 1.36 seconds to deallocate network for instance. 
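Editor's note: the terminate path for instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc traced above runs PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on datastore2, and then deallocates the network through Neutron. The sketch below reproduces only the vSphere-side ordering using oslo.vmware calls; it is an illustration rather than Nova's vmops implementation, and `ds_path` and `dc_ref` are placeholder arguments.

```python
def destroy_vm(session, vm_ref, ds_path, dc_ref):
    """Sketch of the teardown order shown above: power off, unregister the
    VM, then delete its directory from the datastore."""
    # Power off is a vCenter task, so it is polled like any other task.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain call, not a task, so there is nothing to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the datastore files goes through the FileManager and is again
    # a task (the DeleteDatastoreFile_Task records above).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
```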
[ 1131.369744] env[68674]: DEBUG nova.objects.base [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Object Instance<142e8ede-90e2-47cf-a1b1-8c4fd59eed0a> lazy-loaded attributes: flavor,info_cache {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1131.381993] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.382298] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.382484] env[68674]: INFO nova.compute.manager [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Shelving [ 1131.455853] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.456504] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.458393] env[68674]: INFO nova.compute.claims [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1131.485064] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f6d28c5e-fe32-4c53-98ac-747a1b79e6c4] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1131.617030] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1131.681806] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.989491] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f147b483-9384-4fc1-996e-e8fb035c1942] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1132.150752] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.164456] env[68674]: DEBUG nova.network.neutron [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating instance_info_cache with network_info: [{"id": "4d94c698-e74c-4238-8f2e-ead75015687e", "address": "fa:16:3e:2c:a1:73", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94c698-e7", "ovs_interfaceid": "4d94c698-e74c-4238-8f2e-ead75015687e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.227648] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.227867] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.228062] env[68674]: DEBUG nova.network.neutron [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.392207] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.392554] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c36a47c9-36d7-422e-a5e0-92690b447847 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.400381] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1132.400381] env[68674]: value = "task-3240905" [ 1132.400381] env[68674]: _type = "Task" [ 1132.400381] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.410111] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240905, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.495631] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 7d953e59-53c1-4041-a641-35c12c012f7e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1132.667851] env[68674]: DEBUG oslo_concurrency.lockutils [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "refresh_cache-142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.670963] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d614740b-2031-43dc-825a-77db16561f9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.680150] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd440352-765d-4193-87ac-d0a7d2eeffcc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.711106] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42c9789-4179-44cd-a267-69642903f736 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.719972] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d35eb9b-45ac-4170-8754-416a859d429c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.735614] env[68674]: DEBUG nova.compute.provider_tree [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.910788] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240905, 'name': PowerOffVM_Task, 'duration_secs': 0.244345} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.911080] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.911881] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9538e4-c611-428b-a27d-12756e9009af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.933818] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18668ac-cf6e-4d0c-b5b4-0bf9b327fa68 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.999922] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 036fbca7-be6a-43c6-972e-a71524833498] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1133.030325] env[68674]: DEBUG nova.network.neutron [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.185375] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525af175-b1ef-431f-e3d0-502f26b946b0/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1133.186321] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bb5267-0315-4585-a223-7d76052eede4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.193611] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525af175-b1ef-431f-e3d0-502f26b946b0/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1133.193782] env[68674]: ERROR oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525af175-b1ef-431f-e3d0-502f26b946b0/disk-0.vmdk due to incomplete transfer. [ 1133.193996] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cdb15b2d-3504-4e32-8bfa-7a5a10ae29cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.201748] env[68674]: DEBUG oslo_vmware.rw_handles [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525af175-b1ef-431f-e3d0-502f26b946b0/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1133.201748] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Uploaded image 5bc5480f-c581-4561-8a80-b3996f994c28 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1133.203927] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1133.204188] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f6183c0f-bd82-431e-acd0-62b7191e6621 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.209780] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1133.209780] env[68674]: value = "task-3240906" [ 1133.209780] env[68674]: _type = "Task" [ 1133.209780] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.217856] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240906, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.238747] env[68674]: DEBUG nova.scheduler.client.report [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.445116] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1133.445471] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5c83fb63-3728-4546-a0fc-e33b7c2af40b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.453204] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1133.453204] env[68674]: value = "task-3240907" [ 1133.453204] env[68674]: _type = "Task" [ 1133.453204] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.460845] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240907, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.502451] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 867fd9ca-049f-441a-94bc-af60df598043] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1133.533278] env[68674]: DEBUG oslo_concurrency.lockutils [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.679089] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1133.679443] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0024376-7586-4c06-9832-1e7be53a85c5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.686437] env[68674]: DEBUG oslo_vmware.api [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1133.686437] env[68674]: value = "task-3240908" [ 1133.686437] env[68674]: _type = "Task" [ 1133.686437] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.694608] env[68674]: DEBUG oslo_vmware.api [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.719360] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240906, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.743504] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.744085] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1133.746966] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.065s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.747200] env[68674]: DEBUG nova.objects.instance [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lazy-loading 'resources' on Instance uuid e371ae6b-44fd-47ce-9c58-8981e7da5cbc {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.962854] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240907, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.005613] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 040d7108-8da1-4914-b7fd-03cf09ec68aa] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1134.043611] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8456303d-f195-4113-94e5-c3042d9409bb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.052023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8396093-bde5-4c41-8b4f-abf493e5f92e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.196758] env[68674]: DEBUG oslo_vmware.api [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240908, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.219639] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240906, 'name': Destroy_Task, 'duration_secs': 0.701494} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.220718] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Destroyed the VM [ 1134.220718] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1134.220718] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bc9d9643-6af4-4479-be9d-5e9f48e050ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.226962] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1134.226962] env[68674]: value = "task-3240909" [ 1134.226962] env[68674]: _type = "Task" [ 1134.226962] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.234476] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240909, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.250150] env[68674]: DEBUG nova.compute.utils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1134.254731] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1134.254731] env[68674]: DEBUG nova.network.neutron [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1134.290724] env[68674]: DEBUG nova.policy [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34b9ef7eae4a4cceba2fa699ce38ac0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f2a133c72064227bd419d63d5d9557f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1134.470082] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240907, 'name': CreateSnapshot_Task, 'duration_secs': 0.944945} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.470384] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1134.471202] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7799ee-71fb-4661-99e8-0854e43fa49f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.477773] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c0c3c4-4ee9-4136-843f-0d8a67eabfd5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.490510] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44573e30-f63e-4e79-a8cc-0bb259b2bb2b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.519534] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 95386cdb-c2e4-476a-8aaf-e10fdc40b591] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1134.523518] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831ed5ff-dca7-4c5b-8dcb-0e8a98b7f4ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.532017] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5983ac9c-4c2c-4e43-ac50-fa8e00ce913f {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.547706] env[68674]: DEBUG nova.compute.provider_tree [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.697839] env[68674]: DEBUG oslo_vmware.api [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240908, 'name': PowerOnVM_Task, 'duration_secs': 0.544224} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.698189] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1134.698424] env[68674]: DEBUG nova.compute.manager [None req-859fae63-1b4e-4d45-948e-770ed00fc68e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.699264] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc62939e-61fa-4d70-a744-d39477499c81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.715954] env[68674]: DEBUG nova.network.neutron [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Successfully created port: 47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1134.737539] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240909, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.755767] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1134.994736] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1134.995146] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d6c720ae-167b-4bff-8015-07b52c7fdbb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.003872] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1135.003872] env[68674]: value = "task-3240910" [ 1135.003872] env[68674]: _type = "Task" [ 1135.003872] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.011992] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240910, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.027246] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3a0a7950-af31-4a20-a19d-44fbce8735a2] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1135.055024] env[68674]: DEBUG nova.scheduler.client.report [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.171439] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f67fae-ba2d-4d8d-b73b-104f30d9ac6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.190180] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98b9172-18d5-4b7d-b3f4-0028f628e6fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.197372] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.237174] 
env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240909, 'name': RemoveSnapshot_Task, 'duration_secs': 0.830798} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.237452] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1135.237729] env[68674]: DEBUG nova.compute.manager [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1135.238570] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe94285-079c-4508-a94c-5a8c99a850ad {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.517254] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240910, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.529862] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f3e7cacd-20d3-4dbe-89b0-80d89173069a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1135.558439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.560966] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.410s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.566743] env[68674]: INFO nova.compute.claims [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.583869] env[68674]: INFO nova.scheduler.client.report [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Deleted allocations for instance e371ae6b-44fd-47ce-9c58-8981e7da5cbc [ 1135.703667] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.703992] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84cfe84a-9e80-4999-9762-80fb5cf1c127 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.711910] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1135.711910] env[68674]: value = "task-3240911" [ 1135.711910] env[68674]: _type = "Task" [ 1135.711910] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.721277] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.751034] env[68674]: INFO nova.compute.manager [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Shelve offloading [ 1135.766374] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1135.809094] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1135.809392] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.809581] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1135.809788] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.809957] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1135.810129] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1135.810383] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1135.810523] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1135.810697] env[68674]: DEBUG nova.virt.hardware [None 
req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1135.810891] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1135.811043] env[68674]: DEBUG nova.virt.hardware [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1135.811971] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8378eb5-6e6a-44b7-a6db-a9b77c0918a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.820576] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30caeca-9664-4eab-8c20-eb65d5e6be0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.014968] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240910, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.033166] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 63d6c185-db2c-4ede-a716-9a0dd432ab1f] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1136.093052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-828fdbf1-36fb-4698-8085-8dd87b5fe560 tempest-ServersTestJSON-127608251 tempest-ServersTestJSON-127608251-project-member] Lock "e371ae6b-44fd-47ce-9c58-8981e7da5cbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.932s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.185164] env[68674]: DEBUG nova.compute.manager [req-b8d7f9d9-bc42-4f7a-a9e4-3dc2b3b2bbd6 req-6f68987a-4359-42a3-b194-808c7353ea34 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Received event network-vif-plugged-47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.185469] env[68674]: DEBUG oslo_concurrency.lockutils [req-b8d7f9d9-bc42-4f7a-a9e4-3dc2b3b2bbd6 req-6f68987a-4359-42a3-b194-808c7353ea34 service nova] Acquiring lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.185686] env[68674]: DEBUG oslo_concurrency.lockutils [req-b8d7f9d9-bc42-4f7a-a9e4-3dc2b3b2bbd6 req-6f68987a-4359-42a3-b194-808c7353ea34 service nova] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f-events" 
acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.185856] env[68674]: DEBUG oslo_concurrency.lockutils [req-b8d7f9d9-bc42-4f7a-a9e4-3dc2b3b2bbd6 req-6f68987a-4359-42a3-b194-808c7353ea34 service nova] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.186340] env[68674]: DEBUG nova.compute.manager [req-b8d7f9d9-bc42-4f7a-a9e4-3dc2b3b2bbd6 req-6f68987a-4359-42a3-b194-808c7353ea34 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] No waiting events found dispatching network-vif-plugged-47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1136.186551] env[68674]: WARNING nova.compute.manager [req-b8d7f9d9-bc42-4f7a-a9e4-3dc2b3b2bbd6 req-6f68987a-4359-42a3-b194-808c7353ea34 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Received unexpected event network-vif-plugged-47cc2f82-8285-4168-b696-407ade0efaaf for instance with vm_state building and task_state spawning. [ 1136.226767] env[68674]: DEBUG oslo_vmware.api [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240911, 'name': PowerOnVM_Task, 'duration_secs': 0.412837} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.227058] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.227298] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-67b0dace-8723-4925-845a-0973b785ebfd tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance 'c876b288-de2a-4195-bfef-88f38e219d9a' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.257533] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.257812] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb0eb936-11d3-4bb2-a87f-9895a6fb3bce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.270367] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1136.270367] env[68674]: value = "task-3240912" [ 1136.270367] env[68674]: _type = "Task" [ 1136.270367] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.280109] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1136.280487] env[68674]: DEBUG nova.compute.manager [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.281374] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc46c67-aa43-4406-b3d2-99a53931a3c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.287721] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.287911] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.288112] env[68674]: DEBUG nova.network.neutron [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1136.516282] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240910, 'name': CloneVM_Task, 'duration_secs': 1.483439} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.516711] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Created linked-clone VM from snapshot [ 1136.517067] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f98d7a8-dc1d-45cb-a4ca-4dc2998f7c48 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.520793] env[68674]: DEBUG nova.network.neutron [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Successfully updated port: 47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1136.527601] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Uploading image b7ab22f3-f5be-4404-8582-a3a1de837a28 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1136.536302] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2ed83aff-9a73-464b-914a-479d91cdfce0] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1136.549456] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1136.549456] env[68674]: value = "vm-647698" [ 1136.549456] env[68674]: _type = "VirtualMachine" [ 1136.549456] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1136.549992] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7185bb92-aaac-4da5-9486-43e3f4785a86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.558548] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lease: (returnval){ [ 1136.558548] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b69d1-58c8-0d65-6ccf-8853bec627b0" [ 1136.558548] env[68674]: _type = "HttpNfcLease" [ 1136.558548] env[68674]: } obtained for exporting VM: (result){ [ 1136.558548] env[68674]: value = "vm-647698" [ 1136.558548] env[68674]: _type = "VirtualMachine" [ 1136.558548] env[68674]: }. 
{{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1136.558844] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the lease: (returnval){ [ 1136.558844] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b69d1-58c8-0d65-6ccf-8853bec627b0" [ 1136.558844] env[68674]: _type = "HttpNfcLease" [ 1136.558844] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1136.566402] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1136.566402] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b69d1-58c8-0d65-6ccf-8853bec627b0" [ 1136.566402] env[68674]: _type = "HttpNfcLease" [ 1136.566402] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1136.775779] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4738102e-a79b-43fa-b01b-d7885cf3b3cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.787937] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad945ad-9606-4e94-875c-f4788c096bb5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.820679] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f1ff0f-131b-495a-b2a8-b525759ff585 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.828196] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3898a86-dbac-4868-b60d-e8c3e27d7eaf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.841729] env[68674]: DEBUG nova.compute.provider_tree [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.025543] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.025688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.025839] env[68674]: DEBUG nova.network.neutron [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Building network info cache for 
instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.039121] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 7aa58e2f-1202-4252-9c38-ce53084c573f] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1137.072028] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1137.072028] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b69d1-58c8-0d65-6ccf-8853bec627b0" [ 1137.072028] env[68674]: _type = "HttpNfcLease" [ 1137.072028] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1137.072028] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1137.072028] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527b69d1-58c8-0d65-6ccf-8853bec627b0" [ 1137.072028] env[68674]: _type = "HttpNfcLease" [ 1137.072028] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1137.072542] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4632c0a-2d9b-4f08-b1a5-4f11a5287b3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.079692] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524a47e8-9e70-badf-0ba0-32b2d0c80174/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1137.079861] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524a47e8-9e70-badf-0ba0-32b2d0c80174/disk-0.vmdk for reading. 
{{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1137.270566] env[68674]: DEBUG nova.network.neutron [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309bd2b-c0", "ovs_interfaceid": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.308471] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8fb4266f-4e0e-48ea-b071-c6e59647c84d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.345552] env[68674]: DEBUG nova.scheduler.client.report [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.543410] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: a4cb1632-eada-4b10-a66f-64fecf45fd76] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1137.592171] env[68674]: DEBUG nova.network.neutron [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1137.773861] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.858326] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.858969] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.899673] env[68674]: DEBUG nova.network.neutron [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updating instance_info_cache with network_info: [{"id": "47cc2f82-8285-4168-b696-407ade0efaaf", "address": "fa:16:3e:90:e0:85", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cc2f82-82", "ovs_interfaceid": "47cc2f82-8285-4168-b696-407ade0efaaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.048775] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 5bd42044-84f5-4f48-aa97-b7cf990ed35d] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1138.223054] env[68674]: DEBUG nova.compute.manager [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Received event network-changed-47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1138.223526] env[68674]: DEBUG nova.compute.manager 
[req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Refreshing instance network info cache due to event network-changed-47cc2f82-8285-4168-b696-407ade0efaaf. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1138.223919] env[68674]: DEBUG oslo_concurrency.lockutils [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] Acquiring lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.285309] env[68674]: DEBUG nova.compute.manager [req-5010f753-91a8-489a-884a-0c3ca1a348e3 req-951914af-d90e-444f-937c-a7530b9f803b service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-vif-unplugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1138.285638] env[68674]: DEBUG oslo_concurrency.lockutils [req-5010f753-91a8-489a-884a-0c3ca1a348e3 req-951914af-d90e-444f-937c-a7530b9f803b service nova] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.285875] env[68674]: DEBUG oslo_concurrency.lockutils [req-5010f753-91a8-489a-884a-0c3ca1a348e3 req-951914af-d90e-444f-937c-a7530b9f803b service nova] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.286246] env[68674]: DEBUG oslo_concurrency.lockutils [req-5010f753-91a8-489a-884a-0c3ca1a348e3 req-951914af-d90e-444f-937c-a7530b9f803b service nova] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.286561] env[68674]: DEBUG nova.compute.manager [req-5010f753-91a8-489a-884a-0c3ca1a348e3 req-951914af-d90e-444f-937c-a7530b9f803b service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] No waiting events found dispatching network-vif-unplugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1138.286742] env[68674]: WARNING nova.compute.manager [req-5010f753-91a8-489a-884a-0c3ca1a348e3 req-951914af-d90e-444f-937c-a7530b9f803b service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received unexpected event network-vif-unplugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1138.321511] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.321751] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.321941] env[68674]: INFO nova.compute.manager [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Shelving [ 1138.365197] env[68674]: DEBUG nova.compute.utils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1138.367012] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1138.371887] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.404417] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.404885] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance network_info: |[{"id": "47cc2f82-8285-4168-b696-407ade0efaaf", "address": "fa:16:3e:90:e0:85", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cc2f82-82", "ovs_interfaceid": "47cc2f82-8285-4168-b696-407ade0efaaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1138.405532] env[68674]: DEBUG oslo_concurrency.lockutils [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] Acquired lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.405721] env[68674]: DEBUG nova.network.neutron [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Refreshing network info cache for port 47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.407235] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:e0:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47cc2f82-8285-4168-b696-407ade0efaaf', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.417705] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.421470] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.422173] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-133b835b-a24c-44a0-bb46-a7b550c1f22b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.444889] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.444889] env[68674]: value = "task-3240914" [ 1138.444889] env[68674]: _type = "Task" [ 1138.444889] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.450928] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1138.451796] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44497311-45cb-4cd1-9665-df2da2fdc5c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.457981] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240914, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.462653] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1138.462942] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bf76686-0363-46fd-b647-9cf9f3603c5f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.533295] env[68674]: DEBUG nova.policy [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc1277a660b040b08b95d61e03bbb65f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e73e759715a4e39a03bd234d918b2fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1138.541309] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.541491] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.541704] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleting the datastore file [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.542303] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-880a47a5-14ac-4725-a7fe-6866a30cde21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.549881] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1138.549881] env[68674]: value = "task-3240916" [ 1138.549881] env[68674]: _type = "Task" [ 1138.549881] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.554393] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e1283f87-5bdb-4d4e-a1c5-f3b1c9180188] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1138.562663] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.878249] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.958312] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240914, 'name': CreateVM_Task, 'duration_secs': 0.414587} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.958503] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1138.959249] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.959429] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.959773] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1138.960059] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5bd672e-cbb6-4de3-9010-e2b45d04b9b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.965512] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1138.965512] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a367ed-000c-65f2-215e-6d565fc80292" [ 1138.965512] env[68674]: _type = "Task" [ 1138.965512] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.973603] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a367ed-000c-65f2-215e-6d565fc80292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.974442] env[68674]: DEBUG nova.network.neutron [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updated VIF entry in instance network info cache for port 47cc2f82-8285-4168-b696-407ade0efaaf. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1138.974765] env[68674]: DEBUG nova.network.neutron [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updating instance_info_cache with network_info: [{"id": "47cc2f82-8285-4168-b696-407ade0efaaf", "address": "fa:16:3e:90:e0:85", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cc2f82-82", "ovs_interfaceid": "47cc2f82-8285-4168-b696-407ade0efaaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.060130] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: c4fd04a7-2b11-4c4b-84d1-53edc1e3f035] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1139.061925] env[68674]: DEBUG oslo_vmware.api [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227509} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.062367] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1139.063752] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1139.063752] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1139.096400] env[68674]: INFO nova.scheduler.client.report [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted allocations for instance ba4bfbb4-a89b-4ab6-964e-792647fd5a89 [ 1139.142176] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Successfully created port: 76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.245840] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.246126] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.246344] env[68674]: DEBUG nova.compute.manager [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Going to confirm migration 7 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1139.335017] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.335617] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb06367b-3e95-40ed-823c-a99ae43b0ce9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.343482] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1139.343482] env[68674]: value = "task-3240917" [ 1139.343482] env[68674]: _type = "Task" [ 1139.343482] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.352218] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240917, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.478019] env[68674]: DEBUG oslo_concurrency.lockutils [req-4ea521a5-8fdb-42df-afd4-6b1221fcd791 req-84cc3f0c-a43e-4224-b992-9ae888995e2d service nova] Releasing lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.478596] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a367ed-000c-65f2-215e-6d565fc80292, 'name': SearchDatastore_Task, 'duration_secs': 0.010746} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.479032] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.479386] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.479714] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.479880] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.480427] env[68674]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.481102] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-527ef594-8cde-49f7-93d5-8f56df74cebd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.490854] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.491117] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1139.491919] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e4f7f69-4b82-4b93-98b7-995b5b4d98ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.497784] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1139.497784] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5281afb4-4ec9-e50d-5512-9b5d5fc0812c" [ 1139.497784] env[68674]: _type = "Task" [ 1139.497784] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.502062] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Successfully created port: 99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.509013] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5281afb4-4ec9-e50d-5512-9b5d5fc0812c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.563719] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f69c5fcf-6d25-48a5-a154-c3632c76175a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1139.601058] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.601396] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.601656] env[68674]: DEBUG nova.objects.instance [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'resources' on Instance uuid ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.755212] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Successfully created port: 1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.796677] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.796884] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquired lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.797123] env[68674]: DEBUG nova.network.neutron [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.797330] env[68674]: DEBUG nova.objects.instance [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'info_cache' on Instance uuid c876b288-de2a-4195-bfef-88f38e219d9a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.853202] env[68674]: DEBUG oslo_vmware.api [None 
req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240917, 'name': PowerOffVM_Task, 'duration_secs': 0.192626} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.853464] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1139.854261] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1a1c5a-c120-4edf-b52f-4c36f7311184 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.873426] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d92133-b846-4177-be0e-0caf797d7cfd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.886813] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.912294] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.912559] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.912733] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.912890] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Flavor pref 0:0:0 {{(pid=68674) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.913067] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.913232] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.913448] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.913610] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.913804] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.913942] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.914145] env[68674]: DEBUG nova.virt.hardware [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.914989] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bda17c6-5573-484c-b484-34fadc1aa5c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.923903] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d134e7c7-9e51-4b28-8c03-8935f3d346d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.008968] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5281afb4-4ec9-e50d-5512-9b5d5fc0812c, 'name': SearchDatastore_Task, 'duration_secs': 0.017541} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.009769] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06905f9f-3d6e-4771-8fe0-9e73b4d32597 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.015505] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1140.015505] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529468e5-1583-5961-0778-815ffe2fac30" [ 1140.015505] env[68674]: _type = "Task" [ 1140.015505] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.023946] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529468e5-1583-5961-0778-815ffe2fac30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.067365] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 5c12cb5d-821c-4e63-86a0-dadc9794a8ba] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1140.103836] env[68674]: DEBUG nova.objects.instance [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'numa_topology' on Instance uuid ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.318942] env[68674]: DEBUG nova.compute.manager [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.319180] env[68674]: DEBUG nova.compute.manager [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing instance network info cache due to event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1140.319769] env[68674]: DEBUG oslo_concurrency.lockutils [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.319769] env[68674]: DEBUG oslo_concurrency.lockutils [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.319769] env[68674]: DEBUG nova.network.neutron [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.384595] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1140.384922] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6ac67ef3-5d7c-45aa-a4a7-70a43264ffb7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.392679] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1140.392679] env[68674]: value = "task-3240918" [ 1140.392679] env[68674]: _type = "Task" [ 1140.392679] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.402353] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240918, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.526215] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]529468e5-1583-5961-0778-815ffe2fac30, 'name': SearchDatastore_Task, 'duration_secs': 0.010767} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.526524] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.526799] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1140.527092] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f96a905b-06e8-4c2c-9a4c-1f84a53d4593 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.533773] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1140.533773] env[68674]: value = "task-3240919" [ 1140.533773] env[68674]: _type = "Task" [ 1140.533773] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.542144] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240919, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.570890] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f029042f-d80b-453e-adc9-1e65d7da7aaf] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1140.605682] env[68674]: DEBUG nova.objects.base [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1140.775316] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66cc75e-b336-4146-a8cf-d751ef9e17cd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.783552] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f249f2-0e6c-4b3a-a584-58f656d1f1d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.817831] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d44a55-8c46-4cd7-8070-c64b248b6263 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.829381] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd365c76-79e3-4105-b2ec-fa15339cc108 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.845240] env[68674]: DEBUG nova.compute.provider_tree [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.912145] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240918, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.045970] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240919, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.074244] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 5e3f667c-5d3a-4465-9186-779563087480] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1141.180989] env[68674]: DEBUG nova.network.neutron [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [{"id": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "address": "fa:16:3e:99:3b:04", "network": {"id": "eae87694-bbf6-4eed-9305-26be80e0529b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1262353116-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c958fcb56a934ef7919b76aa2a193429", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15b18361-a1", "ovs_interfaceid": "15b18361-a1c9-4dab-bcaf-6a40837d6bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.182863] env[68674]: DEBUG nova.network.neutron [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updated VIF entry in instance network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1141.183352] env[68674]: DEBUG nova.network.neutron [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7309bd2b-c0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.349469] env[68674]: DEBUG nova.scheduler.client.report [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.404088] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240918, 'name': CreateSnapshot_Task, 'duration_secs': 0.892848} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.404413] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1141.405199] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb023e5-131b-4316-b492-aef3a7f26acc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.424541] env[68674]: DEBUG nova.compute.manager [req-72a037f6-777c-4fd2-9dcd-56d70deb72c0 req-44521b72-26ee-4a9a-acdf-a02b255a0704 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-vif-plugged-76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.424741] env[68674]: DEBUG oslo_concurrency.lockutils [req-72a037f6-777c-4fd2-9dcd-56d70deb72c0 req-44521b72-26ee-4a9a-acdf-a02b255a0704 service nova] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.424980] env[68674]: DEBUG oslo_concurrency.lockutils [req-72a037f6-777c-4fd2-9dcd-56d70deb72c0 req-44521b72-26ee-4a9a-acdf-a02b255a0704 service nova] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.425169] env[68674]: DEBUG oslo_concurrency.lockutils [req-72a037f6-777c-4fd2-9dcd-56d70deb72c0 req-44521b72-26ee-4a9a-acdf-a02b255a0704 service nova] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.425384] env[68674]: DEBUG nova.compute.manager [req-72a037f6-777c-4fd2-9dcd-56d70deb72c0 req-44521b72-26ee-4a9a-acdf-a02b255a0704 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] No waiting events found dispatching network-vif-plugged-76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.425586] env[68674]: WARNING nova.compute.manager [req-72a037f6-777c-4fd2-9dcd-56d70deb72c0 req-44521b72-26ee-4a9a-acdf-a02b255a0704 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received unexpected event network-vif-plugged-76984c11-ddde-4f48-8818-0911510f8452 for instance with vm_state building and task_state spawning. 
[ 1141.516156] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Successfully updated port: 76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1141.545356] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240919, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648262} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.545581] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1141.546070] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1141.546070] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2052ade9-5957-4834-89a6-fbe5c5012d55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.552987] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1141.552987] env[68674]: value = "task-3240920" [ 1141.552987] env[68674]: _type = "Task" [ 1141.552987] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.563511] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240920, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.578074] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 6803af03-b1d5-47e6-9471-5213469e4103] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1141.688039] env[68674]: DEBUG oslo_concurrency.lockutils [req-0b96ec00-c457-42b4-ad84-329c8b632cb2 req-0e0d2337-636c-4d5c-8cb5-ae79c0873503 service nova] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.688646] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Releasing lock "refresh_cache-c876b288-de2a-4195-bfef-88f38e219d9a" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.688956] env[68674]: DEBUG nova.objects.instance [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'migration_context' on Instance uuid c876b288-de2a-4195-bfef-88f38e219d9a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.855895] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.254s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.927731] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1141.928433] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d602013b-9953-4c5d-87f6-8c4aeb2c6366 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.937109] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1141.937109] env[68674]: value = "task-3240921" [ 1141.937109] env[68674]: _type = "Task" [ 1141.937109] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.945373] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240921, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.064842] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240920, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076809} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.065024] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.066015] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934c54da-0cc5-4118-a23a-fd50732bed88 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.086830] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0eaf7d72-755b-4977-8f71-7d53ad1cf573] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1142.103085] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.103857] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dca4f39d-2154-4c20-bdce-3676a2904c91 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.134305] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1142.134305] env[68674]: value = "task-3240922" [ 1142.134305] env[68674]: _type = "Task" [ 1142.134305] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.146931] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240922, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.191732] env[68674]: DEBUG nova.objects.base [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1142.192808] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c7a426-9281-4eb8-9405-c24b2f088e03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.213558] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acdf684e-0650-4740-bcb9-5d07a66cc86f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.219851] env[68674]: DEBUG oslo_vmware.api [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1142.219851] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5263d683-c554-cc6a-f80c-be2d16c4f7f7" [ 1142.219851] env[68674]: _type = "Task" [ 1142.219851] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.230658] env[68674]: DEBUG oslo_vmware.api [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5263d683-c554-cc6a-f80c-be2d16c4f7f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.255517] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.368653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-7bfa7120-6de8-47ef-a0ad-ab2be1aa8b4c tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 26.004s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.368653] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.113s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.368653] env[68674]: INFO nova.compute.manager [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Unshelving [ 1142.449556] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240921, 'name': CloneVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.605218] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3b90fce5-5d6c-471b-bf2b-e9f4ac11e4ca] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1142.645510] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240922, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.730418] env[68674]: DEBUG oslo_vmware.api [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5263d683-c554-cc6a-f80c-be2d16c4f7f7, 'name': SearchDatastore_Task, 'duration_secs': 0.008588} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.730611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.730714] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.948959] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240921, 'name': CloneVM_Task} progress is 95%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.109346] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 77fa5a89-961b-4c84-a75e-a5be0253677e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1143.144075] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240922, 'name': ReconfigVM_Task, 'duration_secs': 0.589655} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.144372] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Reconfigured VM instance instance-00000075 to attach disk [datastore2] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.144989] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ded27118-e102-4768-8f7a-9b5b64625f74 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.150958] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1143.150958] env[68674]: value = "task-3240923" [ 1143.150958] env[68674]: _type = "Task" [ 1143.150958] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.160226] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240923, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.390457] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.423459] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564f0123-7ec8-4a75-9f4a-66ccd8abb9dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.440109] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4372de71-9dc2-4195-acd3-8fae5ee87259 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.458922] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240921, 'name': CloneVM_Task, 'duration_secs': 1.26136} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.487495] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Created linked-clone VM from snapshot [ 1143.488642] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52968143-38c3-4e5e-ab20-54cd826892ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.491820] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f84a17e-27c3-4b20-aa6f-511b1afad823 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.499823] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Uploading image 4567b890-657d-4354-bb3d-083e8acf9038 {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1143.506438] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b4e87d-076e-4525-8f82-b7d88614ee46 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.522206] env[68674]: DEBUG nova.compute.provider_tree [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.537699] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 
tempest-ServersNegativeTestJSON-1761443692-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1143.537699] env[68674]: value = "vm-647701" [ 1143.537699] env[68674]: _type = "VirtualMachine" [ 1143.537699] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1143.538263] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5eafc5dd-7d3c-4835-a537-864e5292fac0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.545405] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lease: (returnval){ [ 1143.545405] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5230b236-6994-ac80-6a9e-6218637a75d5" [ 1143.545405] env[68674]: _type = "HttpNfcLease" [ 1143.545405] env[68674]: } obtained for exporting VM: (result){ [ 1143.545405] env[68674]: value = "vm-647701" [ 1143.545405] env[68674]: _type = "VirtualMachine" [ 1143.545405] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1143.545707] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the lease: (returnval){ [ 1143.545707] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5230b236-6994-ac80-6a9e-6218637a75d5" [ 1143.545707] env[68674]: _type = "HttpNfcLease" [ 1143.545707] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1143.552489] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1143.552489] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5230b236-6994-ac80-6a9e-6218637a75d5" [ 1143.552489] env[68674]: _type = "HttpNfcLease" [ 1143.552489] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1143.590685] env[68674]: DEBUG nova.compute.manager [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-changed-76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.590864] env[68674]: DEBUG nova.compute.manager [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Refreshing instance network info cache due to event network-changed-76984c11-ddde-4f48-8818-0911510f8452. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1143.591089] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] Acquiring lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.591231] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] Acquired lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.591400] env[68674]: DEBUG nova.network.neutron [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Refreshing network info cache for port 76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.613373] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: fa89e0b5-590d-43fb-bb11-02f8fdee0c2f] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1143.662880] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240923, 'name': Rename_Task, 'duration_secs': 0.287028} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.663207] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1143.663460] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ff43769-bd08-4523-a920-f0c63eb161b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.671279] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1143.671279] env[68674]: value = "task-3240925" [ 1143.671279] env[68674]: _type = "Task" [ 1143.671279] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.678962] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240925, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.734054] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Successfully updated port: 99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1144.030761] env[68674]: DEBUG nova.scheduler.client.report [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.054535] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1144.054535] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5230b236-6994-ac80-6a9e-6218637a75d5" [ 1144.054535] env[68674]: _type = "HttpNfcLease" [ 1144.054535] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1144.055514] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1144.055514] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5230b236-6994-ac80-6a9e-6218637a75d5" [ 1144.055514] env[68674]: _type = "HttpNfcLease" [ 1144.055514] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1144.056567] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531c3fa0-9d3f-4e67-87db-891f97a8444f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.064440] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfcdca-25b9-c25e-1e5b-b630a40f41ea/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1144.064644] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfcdca-25b9-c25e-1e5b-b630a40f41ea/disk-0.vmdk for reading. 
{{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1144.127163] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 3d85c8c4-f09c-4f75-aff5-9a49d84ae006] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.162352] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4aea08d0-5590-474c-8b25-4c326a43d379 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.165516] env[68674]: DEBUG nova.network.neutron [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1144.181635] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240925, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.251323] env[68674]: DEBUG nova.network.neutron [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.630259] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: baa7cd2c-73f8-43ac-9f7b-c3efae6d5e82] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.683535] env[68674]: DEBUG oslo_vmware.api [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3240925, 'name': PowerOnVM_Task, 'duration_secs': 0.615735} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.684026] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1144.684125] env[68674]: INFO nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Took 8.92 seconds to spawn the instance on the hypervisor. 
[ 1144.684387] env[68674]: DEBUG nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.685345] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9502f432-f625-42f4-92f1-50e834a64931 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.755760] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a0ac1ba-afa0-413f-b91e-3fa6a9408999 req-8c265d18-79e9-4dca-aec0-3638d3689d89 service nova] Releasing lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.045551] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.315s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.048907] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.658s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.049073] env[68674]: DEBUG nova.objects.instance [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'pci_requests' on Instance uuid ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.128628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.128869] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.133908] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.210721] env[68674]: INFO nova.compute.manager [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Took 13.77 seconds to build instance. [ 1145.555144] env[68674]: DEBUG nova.objects.instance [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'numa_topology' on Instance uuid ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.612937] env[68674]: INFO nova.scheduler.client.report [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted allocation for migration 6d5e6e1f-457d-47c7-90f2-f0eac17a7e64 [ 1145.627521] env[68674]: DEBUG nova.compute.manager [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-vif-plugged-99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.627718] env[68674]: DEBUG oslo_concurrency.lockutils [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.627873] env[68674]: DEBUG oslo_concurrency.lockutils [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.628537] env[68674]: DEBUG oslo_concurrency.lockutils [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.628791] env[68674]: DEBUG nova.compute.manager [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] No waiting events found dispatching network-vif-plugged-99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1145.629526] env[68674]: WARNING nova.compute.manager [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received unexpected event network-vif-plugged-99e1922b-5bc3-4df6-aa48-4502164af67c for instance with vm_state building and task_state spawning. 
[ 1145.629682] env[68674]: DEBUG nova.compute.manager [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-changed-99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.629924] env[68674]: DEBUG nova.compute.manager [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Refreshing instance network info cache due to event network-changed-99e1922b-5bc3-4df6-aa48-4502164af67c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1145.630168] env[68674]: DEBUG oslo_concurrency.lockutils [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] Acquiring lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.630339] env[68674]: DEBUG oslo_concurrency.lockutils [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] Acquired lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.630564] env[68674]: DEBUG nova.network.neutron [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Refreshing network info cache for port 99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.633802] env[68674]: DEBUG nova.compute.utils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1145.713351] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47115740-d80c-447d-b7b8-ea6905924c74 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.285s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.789284] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Successfully updated port: 1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1145.855834] env[68674]: DEBUG nova.compute.manager [req-d984bd26-54a5-4bec-b1fe-6166bd15e574 req-3b524c8d-1b9a-4716-a671-c43ee3fc08f5 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-vif-plugged-1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.855834] env[68674]: DEBUG oslo_concurrency.lockutils [req-d984bd26-54a5-4bec-b1fe-6166bd15e574 req-3b524c8d-1b9a-4716-a671-c43ee3fc08f5 service nova] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.855834] env[68674]: DEBUG oslo_concurrency.lockutils [req-d984bd26-54a5-4bec-b1fe-6166bd15e574 req-3b524c8d-1b9a-4716-a671-c43ee3fc08f5 service nova] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.855834] env[68674]: DEBUG oslo_concurrency.lockutils [req-d984bd26-54a5-4bec-b1fe-6166bd15e574 req-3b524c8d-1b9a-4716-a671-c43ee3fc08f5 service nova] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.856516] env[68674]: DEBUG nova.compute.manager [req-d984bd26-54a5-4bec-b1fe-6166bd15e574 req-3b524c8d-1b9a-4716-a671-c43ee3fc08f5 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] No waiting events found dispatching network-vif-plugged-1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1145.856839] env[68674]: WARNING nova.compute.manager [req-d984bd26-54a5-4bec-b1fe-6166bd15e574 req-3b524c8d-1b9a-4716-a671-c43ee3fc08f5 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received unexpected event network-vif-plugged-1d39a196-8312-4fd9-b20f-b5ee8c06556b for instance with vm_state building and task_state spawning. [ 1146.059375] env[68674]: INFO nova.compute.claims [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1146.120238] env[68674]: DEBUG oslo_concurrency.lockutils [None req-da63f90e-2159-40fd-8c14-c7bfd1d2be03 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.874s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.137897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.184954] env[68674]: DEBUG nova.network.neutron [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1146.292101] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.300492] env[68674]: DEBUG nova.network.neutron [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.572673] env[68674]: INFO nova.compute.manager [None req-58694215-8d8e-4f02-9c4d-aa396759df0a tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Get console output [ 1146.572871] env[68674]: WARNING nova.virt.vmwareapi.driver [None req-58694215-8d8e-4f02-9c4d-aa396759df0a tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] The console log is missing. Check your VSPC configuration [ 1146.803622] env[68674]: DEBUG oslo_concurrency.lockutils [req-606253a9-8092-441f-b2c5-7dd81423d177 req-729702d4-2b7e-4280-8ba3-3395405d4a8a service nova] Releasing lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.803992] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.804187] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.044038] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524a47e8-9e70-badf-0ba0-32b2d0c80174/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1147.045024] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec550b0-e92e-4cd1-97e6-b00e741ddb5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.051817] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524a47e8-9e70-badf-0ba0-32b2d0c80174/disk-0.vmdk is in state: ready. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1147.052056] env[68674]: ERROR oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524a47e8-9e70-badf-0ba0-32b2d0c80174/disk-0.vmdk due to incomplete transfer. [ 1147.052306] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c6854508-8d9b-4696-9eea-4493eb87d32b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.060108] env[68674]: DEBUG oslo_vmware.rw_handles [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524a47e8-9e70-badf-0ba0-32b2d0c80174/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1147.060337] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Uploaded image b7ab22f3-f5be-4404-8582-a3a1de837a28 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1147.062647] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1147.062948] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-91b32878-3f36-4153-bd29-2f80e00a0b96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.069811] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1147.069811] env[68674]: value = "task-3240926" [ 1147.069811] env[68674]: _type = "Task" [ 1147.069811] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.084195] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240926, 'name': Destroy_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.219650] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.219939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.220518] env[68674]: INFO nova.compute.manager [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Attaching volume 7b439213-1295-4de8-b800-8ba487a72af9 to /dev/sdb [ 1147.252756] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6132c5-6009-42ad-b517-4237bed4cb75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.259495] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4d8be1-3348-4b26-b873-15e0ae99cbc3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.265601] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd2420a-c0c1-4592-8bd6-f160aa539306 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.272503] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770f5981-29b7-40f1-a8e2-927e88c6363d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.302888] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9444e64a-8f54-4268-a29d-9dd73cde0b64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.315219] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7fa263-a5f9-4cae-9f85-3da1e602ef36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.319673] env[68674]: DEBUG nova.virt.block_device [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updating existing volume attachment record: c9f1d40f-c414-4309-9513-96b16d330ae8 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1147.331535] env[68674]: DEBUG nova.compute.provider_tree [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 
tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.365224] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.581130] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240926, 'name': Destroy_Task} progress is 33%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.633389] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.803037] env[68674]: DEBUG nova.network.neutron [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [{"id": "76984c11-ddde-4f48-8818-0911510f8452", "address": "fa:16:3e:eb:76:a3", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76984c11-dd", "ovs_interfaceid": "76984c11-ddde-4f48-8818-0911510f8452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99e1922b-5bc3-4df6-aa48-4502164af67c", "address": "fa:16:3e:24:52:08", "network": {"id": "079ae866-5b90-4122-b326-43b9a901e04c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-688371485", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", 
"external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99e1922b-5b", "ovs_interfaceid": "99e1922b-5bc3-4df6-aa48-4502164af67c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "address": "fa:16:3e:bf:b6:bf", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d39a196-83", "ovs_interfaceid": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.834874] env[68674]: DEBUG nova.scheduler.client.report [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.888228] env[68674]: DEBUG nova.compute.manager [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-changed-1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1147.888465] env[68674]: DEBUG nova.compute.manager [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Refreshing instance network info cache due to event network-changed-1d39a196-8312-4fd9-b20f-b5ee8c06556b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1147.888786] env[68674]: DEBUG oslo_concurrency.lockutils [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] Acquiring lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.083186] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240926, 'name': Destroy_Task, 'duration_secs': 0.591853} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.083564] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Destroyed the VM [ 1148.083925] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1148.084301] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7fdb6759-fbd8-407a-9dfd-db35b40a9b6a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.091470] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1148.091470] env[68674]: value = "task-3240928" [ 1148.091470] env[68674]: _type = "Task" [ 1148.091470] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.103873] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240928, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.140032] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_power_states {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.306235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Releasing lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.306761] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance network_info: |[{"id": "76984c11-ddde-4f48-8818-0911510f8452", "address": "fa:16:3e:eb:76:a3", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76984c11-dd", "ovs_interfaceid": "76984c11-ddde-4f48-8818-0911510f8452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99e1922b-5bc3-4df6-aa48-4502164af67c", "address": "fa:16:3e:24:52:08", "network": {"id": "079ae866-5b90-4122-b326-43b9a901e04c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-688371485", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99e1922b-5b", "ovs_interfaceid": "99e1922b-5bc3-4df6-aa48-4502164af67c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "address": "fa:16:3e:bf:b6:bf", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d39a196-83", "ovs_interfaceid": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1148.307139] env[68674]: DEBUG oslo_concurrency.lockutils [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] Acquired lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.307344] env[68674]: DEBUG nova.network.neutron [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Refreshing network info cache for port 1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1148.309058] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:76:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76984c11-ddde-4f48-8818-0911510f8452', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:52:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a58387dd-f438-4913-af6a-fafb734cd881', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99e1922b-5bc3-4df6-aa48-4502164af67c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:b6:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d39a196-8312-4fd9-b20f-b5ee8c06556b', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1148.320725] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Creating folder: Project (8e73e759715a4e39a03bd234d918b2fb). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1148.323770] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-136e41b8-b7e3-4222-ad1a-8b0325ea0dc4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.335529] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Created folder: Project (8e73e759715a4e39a03bd234d918b2fb) in parent group-v647377. [ 1148.335739] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Creating folder: Instances. Parent ref: group-v647703. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1148.335977] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d18f596-a065-4047-adf2-9475c61d4e3c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.339614] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.291s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.344997] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Created folder: Instances in parent group-v647703. [ 1148.345339] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1148.345588] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1148.345824] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-369ee727-955a-4c39-beb8-bbb3b047a118 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.370698] env[68674]: INFO nova.network.neutron [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating port 7309bd2b-c077-4257-8efb-bf6e8d516ab7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1148.374057] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1148.374057] env[68674]: value = "task-3240931" [ 1148.374057] env[68674]: _type = "Task" [ 1148.374057] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.381706] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240931, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.539795] env[68674]: DEBUG nova.network.neutron [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updated VIF entry in instance network info cache for port 1d39a196-8312-4fd9-b20f-b5ee8c06556b. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.540602] env[68674]: DEBUG nova.network.neutron [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [{"id": "76984c11-ddde-4f48-8818-0911510f8452", "address": "fa:16:3e:eb:76:a3", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76984c11-dd", "ovs_interfaceid": "76984c11-ddde-4f48-8818-0911510f8452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "99e1922b-5bc3-4df6-aa48-4502164af67c", "address": "fa:16:3e:24:52:08", "network": {"id": "079ae866-5b90-4122-b326-43b9a901e04c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-688371485", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99e1922b-5b", "ovs_interfaceid": "99e1922b-5bc3-4df6-aa48-4502164af67c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "address": "fa:16:3e:bf:b6:bf", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d39a196-83", "ovs_interfaceid": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.603586] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240928, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.647525] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Getting list of instances from cluster (obj){ [ 1148.647525] env[68674]: value = "domain-c8" [ 1148.647525] env[68674]: _type = "ClusterComputeResource" [ 1148.647525] env[68674]: } {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1148.650585] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d0018e-0b03-41ba-b733-0661a8cca817 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.671697] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Got total of 10 instances {{(pid=68674) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1148.671924] env[68674]: WARNING nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] While synchronizing instance power states, found 12 instances in the database and 10 instances on the hypervisor. 
[ 1148.672027] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 23891bad-1b63-4237-9243-78954cf67d52 {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.672229] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.672859] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.672859] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.672859] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid dbbf1313-6e44-45e2-8bf6-83409f06cb4b {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.672859] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.673106] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 5384c82b-a584-430f-8ef1-e2731562b5ff {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.673146] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid c876b288-de2a-4195-bfef-88f38e219d9a {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.673287] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.673464] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.673637] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.673788] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Triggering sync for uuid 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 {{(pid=68674) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1148.674210] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "23891bad-1b63-4237-9243-78954cf67d52" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.674438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None 
None] Lock "23891bad-1b63-4237-9243-78954cf67d52" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.674719] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.674900] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.675139] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.675319] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.675647] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.675854] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.676075] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.676259] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.676466] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.676672] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.676841] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.677075] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.677267] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.677475] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.677681] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.677880] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.678119] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.679143] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca616be4-28a9-4ea7-99fa-e00559fd3483 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.682388] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b88685-65fb-496a-b83b-9a31dafb2e95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.685563] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e559c7-57b2-4726-8469-83654e09117f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.688841] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c3feeb-8045-460e-9ca7-a87d20f90c95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.691979] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd6a8e2-aa47-471d-bfc9-ec3d767939ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.695122] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715f33a1-7d28-49d2-8bb2-a321b6fe9d33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.701628] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf50038-3a19-4e3c-b1ab-5647eeb58b75 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.883490] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240931, 'name': CreateVM_Task, 'duration_secs': 0.489162} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.883862] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1148.884621] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.884811] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.885147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1148.885428] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-035d5c39-28c4-4bf4-985f-cce34d1bf3f8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.890598] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1148.890598] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5244365e-5d3a-1b44-052d-3878691de0c8" [ 1148.890598] env[68674]: _type = "Task" [ 1148.890598] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.898473] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5244365e-5d3a-1b44-052d-3878691de0c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.044546] env[68674]: DEBUG oslo_concurrency.lockutils [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] Releasing lock "refresh_cache-2efe81dd-caa3-4fde-8be0-fbf399ce99e0" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.044968] env[68674]: DEBUG nova.compute.manager [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Received event network-changed-47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.045264] env[68674]: DEBUG nova.compute.manager [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Refreshing instance network info cache due to event network-changed-47cc2f82-8285-4168-b696-407ade0efaaf. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1149.045578] env[68674]: DEBUG oslo_concurrency.lockutils [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] Acquiring lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.045805] env[68674]: DEBUG oslo_concurrency.lockutils [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] Acquired lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.046114] env[68674]: DEBUG nova.network.neutron [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Refreshing network info cache for port 47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.101758] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240928, 'name': RemoveSnapshot_Task, 'duration_secs': 0.828614} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.101994] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1149.102310] env[68674]: DEBUG nova.compute.manager [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1149.103079] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6daba4-6b09-4548-b285-8ecd65950b59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.224799] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.225238] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.225621] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.225970] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.226313] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "23891bad-1b63-4237-9243-78954cf67d52" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.234522] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.234886] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.401211] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5244365e-5d3a-1b44-052d-3878691de0c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011532} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.401534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.401787] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.402041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.402196] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.402377] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.402639] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2b94f59-4698-48a9-8435-50743b99d21f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.411402] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.411594] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 
tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1149.412317] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a8bf3e5-9183-4a75-975b-36f7647334af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.417628] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1149.417628] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524becd8-9d68-c821-2043-74d2729ba35d" [ 1149.417628] env[68674]: _type = "Task" [ 1149.417628] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.425258] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524becd8-9d68-c821-2043-74d2729ba35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.618019] env[68674]: INFO nova.compute.manager [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Shelve offloading [ 1149.852987] env[68674]: DEBUG nova.network.neutron [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updated VIF entry in instance network info cache for port 47cc2f82-8285-4168-b696-407ade0efaaf. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1149.853684] env[68674]: DEBUG nova.network.neutron [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updating instance_info_cache with network_info: [{"id": "47cc2f82-8285-4168-b696-407ade0efaaf", "address": "fa:16:3e:90:e0:85", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47cc2f82-82", "ovs_interfaceid": "47cc2f82-8285-4168-b696-407ade0efaaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.878507] env[68674]: DEBUG nova.compute.manager [req-c3ce6dd7-b218-438a-8c17-a6e3fe9b423b req-40396549-2e7e-4b94-ae86-2013f3ebab37 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-vif-plugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.878661] env[68674]: DEBUG oslo_concurrency.lockutils [req-c3ce6dd7-b218-438a-8c17-a6e3fe9b423b req-40396549-2e7e-4b94-ae86-2013f3ebab37 service nova] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.878862] env[68674]: DEBUG oslo_concurrency.lockutils [req-c3ce6dd7-b218-438a-8c17-a6e3fe9b423b req-40396549-2e7e-4b94-ae86-2013f3ebab37 service nova] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.879037] env[68674]: DEBUG oslo_concurrency.lockutils [req-c3ce6dd7-b218-438a-8c17-a6e3fe9b423b req-40396549-2e7e-4b94-ae86-2013f3ebab37 service nova] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.879217] env[68674]: DEBUG nova.compute.manager [req-c3ce6dd7-b218-438a-8c17-a6e3fe9b423b req-40396549-2e7e-4b94-ae86-2013f3ebab37 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] No waiting events found dispatching 
network-vif-plugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1149.879384] env[68674]: WARNING nova.compute.manager [req-c3ce6dd7-b218-438a-8c17-a6e3fe9b423b req-40396549-2e7e-4b94-ae86-2013f3ebab37 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received unexpected event network-vif-plugged-7309bd2b-c077-4257-8efb-bf6e8d516ab7 for instance with vm_state shelved_offloaded and task_state spawning. [ 1149.928769] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524becd8-9d68-c821-2043-74d2729ba35d, 'name': SearchDatastore_Task, 'duration_secs': 0.013265} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.929493] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f42e83f8-d0e6-4390-b3c1-7b93c29f66c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.934652] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1149.934652] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5265c869-90b2-612e-16f9-c6328f52f33a" [ 1149.934652] env[68674]: _type = "Task" [ 1149.934652] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.942676] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5265c869-90b2-612e-16f9-c6328f52f33a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.958591] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.958766] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.958944] env[68674]: DEBUG nova.network.neutron [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.124064] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.124414] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-970082b0-9690-40d0-9f46-eec339705188 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.131326] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1150.131326] env[68674]: value = "task-3240933" [ 1150.131326] env[68674]: _type = "Task" [ 1150.131326] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.139149] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240933, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.357098] env[68674]: DEBUG oslo_concurrency.lockutils [req-c01d6567-ff91-4032-81ec-22700b5fa8ec req-6b5c244a-10e4-452f-8de4-33398f761560 service nova] Releasing lock "refresh_cache-c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.445627] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5265c869-90b2-612e-16f9-c6328f52f33a, 'name': SearchDatastore_Task, 'duration_secs': 0.011786} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.445940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.446253] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2efe81dd-caa3-4fde-8be0-fbf399ce99e0/2efe81dd-caa3-4fde-8be0-fbf399ce99e0.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1150.446551] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37c5112e-6551-4672-a5c5-d4f5d6fc7c82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.453036] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1150.453036] env[68674]: value = "task-3240934" [ 1150.453036] env[68674]: _type = "Task" [ 1150.453036] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.461327] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.645914] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1150.646179] env[68674]: DEBUG nova.compute.manager [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.647149] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779b6058-689c-40e1-83d9-1deccbe3f6f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.654415] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.654604] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.654809] env[68674]: DEBUG nova.network.neutron [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.679980] env[68674]: DEBUG nova.network.neutron [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309bd2b-c0", 
"ovs_interfaceid": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.965173] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240934, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.184247] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.216224] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a3860dcb1048f07adda4da1c9d351c62',container_format='bare',created_at=2025-04-03T08:16:17Z,direct_url=,disk_format='vmdk',id=5bc5480f-c581-4561-8a80-b3996f994c28,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-715671129-shelved',owner='2bca98e5a30741249b1bdee899ffe433',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-04-03T08:16:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1151.216508] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.216671] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1151.216855] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.217013] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1151.217172] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd 
tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1151.217397] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1151.217561] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1151.217731] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1151.217895] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1151.218100] env[68674]: DEBUG nova.virt.hardware [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1151.219317] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7010455f-e48b-44ec-a860-1d097cb20aca {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.230368] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b992433d-b19b-45aa-966b-20210a06b5b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.245960] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:5d:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721c6720-3ce0-450e-9951-a894f03acc27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7309bd2b-c077-4257-8efb-bf6e8d516ab7', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.253715] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1151.256518] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1151.256736] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb8f614d-ebd8-4a42-ac17-0de3f4321885 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.277463] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.277463] env[68674]: value = "task-3240935" [ 1151.277463] env[68674]: _type = "Task" [ 1151.277463] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.284804] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240935, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.397375] env[68674]: DEBUG nova.network.neutron [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updating instance_info_cache with network_info: [{"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "address": "fa:16:3e:ba:6d:d0", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f09d5ee-2e", "ovs_interfaceid": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.465614] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532045} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.466898] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2efe81dd-caa3-4fde-8be0-fbf399ce99e0/2efe81dd-caa3-4fde-8be0-fbf399ce99e0.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1151.466898] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1151.466898] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba131c42-127b-40ff-a7db-e5f27a92f672 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.473924] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1151.473924] env[68674]: value = "task-3240936" [ 1151.473924] env[68674]: _type = "Task" [ 1151.473924] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.484104] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240936, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.787031] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240935, 'name': CreateVM_Task, 'duration_secs': 0.506634} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.787232] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.787923] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.788107] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.788483] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.788751] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39117436-32a8-42af-896a-5e5c75ca8cf0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.793299] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1151.793299] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4578e-c4b2-8b4b-6668-7f3314e205f9" [ 1151.793299] env[68674]: _type = "Task" [ 1151.793299] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.801432] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d4578e-c4b2-8b4b-6668-7f3314e205f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.870272] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1151.870511] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647702', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'name': 'volume-7b439213-1295-4de8-b800-8ba487a72af9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5384c82b-a584-430f-8ef1-e2731562b5ff', 'attached_at': '', 'detached_at': '', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'serial': '7b439213-1295-4de8-b800-8ba487a72af9'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1151.872534] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6af79a7-89c0-4481-b143-733d40393de2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.877156] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfcdca-25b9-c25e-1e5b-b630a40f41ea/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1151.877974] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc95448c-9d25-44f7-86a3-8e40cb9c7229 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.883729] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfcdca-25b9-c25e-1e5b-b630a40f41ea/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1151.883902] env[68674]: ERROR oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfcdca-25b9-c25e-1e5b-b630a40f41ea/disk-0.vmdk due to incomplete transfer. 
[ 1151.895077] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eeeac512-7a99-4f00-b2af-8ed01480106d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.897549] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227e9e46-64f8-4f84-aeb5-66c4df0456f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.901502] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.906086] env[68674]: DEBUG oslo_vmware.rw_handles [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bfcdca-25b9-c25e-1e5b-b630a40f41ea/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1151.906280] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Uploaded image 4567b890-657d-4354-bb3d-083e8acf9038 to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1151.908137] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1151.921560] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9ac25211-a96e-42d6-bdab-a90f0c4003f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.931068] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] volume-7b439213-1295-4de8-b800-8ba487a72af9/volume-7b439213-1295-4de8-b800-8ba487a72af9.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.932831] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19cd1ec0-8aab-4ed0-812b-4e9272c4596f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.948283] env[68674]: DEBUG nova.compute.manager [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.948604] env[68674]: DEBUG nova.compute.manager [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing instance network info cache due to event network-changed-7309bd2b-c077-4257-8efb-bf6e8d516ab7. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.948924] env[68674]: DEBUG oslo_concurrency.lockutils [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] Acquiring lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.949197] env[68674]: DEBUG oslo_concurrency.lockutils [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] Acquired lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.949479] env[68674]: DEBUG nova.network.neutron [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Refreshing network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.955806] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1151.955806] env[68674]: value = "task-3240937" [ 1151.955806] env[68674]: _type = "Task" [ 1151.955806] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.957132] env[68674]: DEBUG oslo_vmware.api [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1151.957132] env[68674]: value = "task-3240938" [ 1151.957132] env[68674]: _type = "Task" [ 1151.957132] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.970990] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240937, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.974316] env[68674]: DEBUG oslo_vmware.api [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240938, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.982083] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065779} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.982374] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.983146] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e84846-2e2d-4982-8c4c-04aba74855f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.009624] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 2efe81dd-caa3-4fde-8be0-fbf399ce99e0/2efe81dd-caa3-4fde-8be0-fbf399ce99e0.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.012304] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a2467fa-1ac4-4b17-afdc-9c92811093e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.033932] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1152.033932] env[68674]: value = "task-3240939" [ 1152.033932] env[68674]: _type = "Task" [ 1152.033932] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.042744] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240939, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.212926] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1152.213877] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7fef18-cd54-46e1-a497-3a0865a54bcb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.222107] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.222107] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfdd0f14-1943-4faa-a7eb-2f78fe175d82 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.305446] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.305709] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Processing image 5bc5480f-c581-4561-8a80-b3996f994c28 {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.305966] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.306134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.306354] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.306672] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-7a89c2a9-eeef-4776-a1b2-a79882c4c44c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.310206] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.310486] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.310642] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleting the datastore file [datastore2] 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.311349] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef73a4a1-9700-4cc4-9daf-e60427349338 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.318404] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.318607] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.320352] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e7f9f2a-7632-49c6-9a24-c4772fc1d99c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.322753] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1152.322753] env[68674]: value = "task-3240941" [ 1152.322753] env[68674]: _type = "Task" [ 1152.322753] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.327627] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1152.327627] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ed30c-26f0-695f-36e9-cc3c3a5fa390" [ 1152.327627] env[68674]: _type = "Task" [ 1152.327627] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.333924] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.338271] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ed30c-26f0-695f-36e9-cc3c3a5fa390, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.468730] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240937, 'name': Destroy_Task, 'duration_secs': 0.388722} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.471566] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Destroyed the VM [ 1152.471816] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1152.472080] env[68674]: DEBUG oslo_vmware.api [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240938, 'name': ReconfigVM_Task, 'duration_secs': 0.453419} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.472307] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-88641b8d-15a2-425f-baf7-b74bcf4bedc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.473810] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Reconfigured VM instance instance-0000006f to attach disk [datastore2] volume-7b439213-1295-4de8-b800-8ba487a72af9/volume-7b439213-1295-4de8-b800-8ba487a72af9.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.478394] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d9daae9-6c80-41f3-9c24-c36855812f52 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.494021] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1152.494021] env[68674]: value = "task-3240942" [ 1152.494021] env[68674]: _type = "Task" [ 1152.494021] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.498215] env[68674]: DEBUG oslo_vmware.api [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1152.498215] env[68674]: value = "task-3240943" [ 1152.498215] env[68674]: _type = "Task" [ 1152.498215] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.504369] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240942, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.509186] env[68674]: DEBUG oslo_vmware.api [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240943, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.543837] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240939, 'name': ReconfigVM_Task, 'duration_secs': 0.325163} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.544191] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 2efe81dd-caa3-4fde-8be0-fbf399ce99e0/2efe81dd-caa3-4fde-8be0-fbf399ce99e0.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.544787] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c63c3046-10de-4724-a6fc-3be45289bad3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.551068] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1152.551068] env[68674]: value = "task-3240944" [ 1152.551068] env[68674]: _type = "Task" [ 1152.551068] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.559253] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240944, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.781809] env[68674]: DEBUG nova.network.neutron [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updated VIF entry in instance network info cache for port 7309bd2b-c077-4257-8efb-bf6e8d516ab7. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.782212] env[68674]: DEBUG nova.network.neutron [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [{"id": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "address": "fa:16:3e:e4:5d:a7", "network": {"id": "e4b29de6-94e6-452e-b362-eb8d7dd615b9", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2121858122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2bca98e5a30741249b1bdee899ffe433", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7309bd2b-c0", "ovs_interfaceid": "7309bd2b-c077-4257-8efb-bf6e8d516ab7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.834186] env[68674]: DEBUG oslo_vmware.api [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180612} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.834803] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.834999] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.835199] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.840280] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1152.840515] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Fetch image to [datastore1] OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb/OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1152.840919] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Downloading stream optimized image 5bc5480f-c581-4561-8a80-b3996f994c28 to [datastore1] OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb/OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb.vmdk on the data store datastore1 as vApp {{(pid=68674) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1152.840919] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Downloading image file data 5bc5480f-c581-4561-8a80-b3996f994c28 to the ESX as VM named 'OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb' {{(pid=68674) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1152.861865] env[68674]: INFO nova.scheduler.client.report [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocations for instance 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 [ 1152.919030] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 
tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1152.919030] env[68674]: value = "resgroup-9" [ 1152.919030] env[68674]: _type = "ResourcePool" [ 1152.919030] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1152.919340] env[68674]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-097bead6-d298-4e26-a854-87b6fdeaa7e5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.941273] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease: (returnval){ [ 1152.941273] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5282d33b-d7ff-3024-cd78-bddfcec7254c" [ 1152.941273] env[68674]: _type = "HttpNfcLease" [ 1152.941273] env[68674]: } obtained for vApp import into resource pool (val){ [ 1152.941273] env[68674]: value = "resgroup-9" [ 1152.941273] env[68674]: _type = "ResourcePool" [ 1152.941273] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1152.941616] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the lease: (returnval){ [ 1152.941616] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5282d33b-d7ff-3024-cd78-bddfcec7254c" [ 1152.941616] env[68674]: _type = "HttpNfcLease" [ 1152.941616] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1152.948368] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1152.948368] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5282d33b-d7ff-3024-cd78-bddfcec7254c" [ 1152.948368] env[68674]: _type = "HttpNfcLease" [ 1152.948368] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1153.006213] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240942, 'name': RemoveSnapshot_Task, 'duration_secs': 0.392894} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.006932] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1153.007261] env[68674]: DEBUG nova.compute.manager [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1153.008034] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd0db85-9892-47bc-84e3-50dfa7baf0e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.013178] env[68674]: DEBUG oslo_vmware.api [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240943, 'name': ReconfigVM_Task, 'duration_secs': 0.153642} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.013759] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647702', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'name': 'volume-7b439213-1295-4de8-b800-8ba487a72af9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5384c82b-a584-430f-8ef1-e2731562b5ff', 'attached_at': '', 'detached_at': '', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'serial': '7b439213-1295-4de8-b800-8ba487a72af9'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1153.061443] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240944, 'name': Rename_Task, 'duration_secs': 0.153392} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.061747] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.062041] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-163edee2-58aa-43e5-9efb-ab634855d05b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.068253] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1153.068253] env[68674]: value = "task-3240946" [ 1153.068253] env[68674]: _type = "Task" [ 1153.068253] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.075836] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240946, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.285299] env[68674]: DEBUG oslo_concurrency.lockutils [req-de3f0a36-bbd1-4429-b639-a66aa8cdbc64 req-1ea336fa-a849-4ad8-88e0-8f33abff2492 service nova] Releasing lock "refresh_cache-ba4bfbb4-a89b-4ab6-964e-792647fd5a89" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.382573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.382845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.383104] env[68674]: DEBUG nova.objects.instance [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'resources' on Instance uuid 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.449919] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1153.449919] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5282d33b-d7ff-3024-cd78-bddfcec7254c" [ 1153.449919] env[68674]: _type = "HttpNfcLease" [ 1153.449919] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1153.523119] env[68674]: INFO nova.compute.manager [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Shelve offloading [ 1153.577891] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240946, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.886409] env[68674]: DEBUG nova.objects.instance [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'numa_topology' on Instance uuid 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.944483] env[68674]: DEBUG nova.compute.manager [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Received event network-vif-unplugged-8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.944711] env[68674]: DEBUG oslo_concurrency.lockutils [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.944928] env[68674]: DEBUG oslo_concurrency.lockutils [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.945118] env[68674]: DEBUG oslo_concurrency.lockutils [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.945292] env[68674]: DEBUG nova.compute.manager [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] No waiting events found dispatching network-vif-unplugged-8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.945511] env[68674]: WARNING nova.compute.manager [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Received unexpected event network-vif-unplugged-8f09d5ee-2ec2-4438-98cc-36bdc79d150b for instance with vm_state shelved_offloaded and task_state None. 
[ 1153.945689] env[68674]: DEBUG nova.compute.manager [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Received event network-changed-8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.945845] env[68674]: DEBUG nova.compute.manager [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Refreshing instance network info cache due to event network-changed-8f09d5ee-2ec2-4438-98cc-36bdc79d150b. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1153.946081] env[68674]: DEBUG oslo_concurrency.lockutils [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] Acquiring lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.946229] env[68674]: DEBUG oslo_concurrency.lockutils [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] Acquired lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.946393] env[68674]: DEBUG nova.network.neutron [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Refreshing network info cache for port 8f09d5ee-2ec2-4438-98cc-36bdc79d150b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.954972] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1153.954972] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5282d33b-d7ff-3024-cd78-bddfcec7254c" [ 1153.954972] env[68674]: _type = "HttpNfcLease" [ 1153.954972] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1153.955743] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1153.955743] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5282d33b-d7ff-3024-cd78-bddfcec7254c" [ 1153.955743] env[68674]: _type = "HttpNfcLease" [ 1153.955743] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1153.956470] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb653820-7cbf-422d-b3f4-d33bff7b7529 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.964222] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd2ebc-a5c9-06ec-bfff-dd404c567337/disk-0.vmdk from lease info. 
{{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1153.964397] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd2ebc-a5c9-06ec-bfff-dd404c567337/disk-0.vmdk. {{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1154.026988] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.031075] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98659239-4bb1-469c-b442-78e3ab3f9902 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.033562] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5c87354b-aee5-4b50-a3eb-cea194b9e26a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.041169] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1154.041169] env[68674]: value = "task-3240947" [ 1154.041169] env[68674]: _type = "Task" [ 1154.041169] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.049319] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.051502] env[68674]: DEBUG nova.objects.instance [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'flavor' on Instance uuid 5384c82b-a584-430f-8ef1-e2731562b5ff {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.080240] env[68674]: DEBUG oslo_vmware.api [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240946, 'name': PowerOnVM_Task, 'duration_secs': 0.596619} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.080509] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1154.080706] env[68674]: INFO nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Took 14.19 seconds to spawn the instance on the hypervisor. [ 1154.080884] env[68674]: DEBUG nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1154.081671] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db2431a-3212-43ba-8b49-575401de354a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.389147] env[68674]: DEBUG nova.objects.base [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Object Instance<6cf18175-1436-4ba5-b4b3-8641ec6bdad1> lazy-loaded attributes: resources,numa_topology {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1154.539090] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc575bb-52e4-4971-ac80-663cad551f24 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.553687] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e01ad3-7c9e-4c85-9e2c-fef7519c3205 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.561061] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1154.561061] env[68674]: DEBUG nova.compute.manager [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1154.561061] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced27071-180a-4eb9-a018-1593a483da32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.565190] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ff1f3bf0-1b37-444f-8a8a-3eca7126ae4c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" "released" by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.345s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.590429] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.914s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.596427] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582231e6-f0b6-4a15-90e6-b34846088e00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.600784] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719657d8-b6bf-43ed-9396-e959ac522665 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.607656] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.608027] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.608332] env[68674]: DEBUG nova.network.neutron [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.609708] env[68674]: INFO nova.compute.manager [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Took 22.49 seconds to build instance. 
[ 1154.624022] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f86f7a-206c-4f80-b1a2-54ca4192195f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.641024] env[68674]: DEBUG nova.compute.provider_tree [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.742130] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.764031] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.777281] env[68674]: DEBUG nova.network.neutron [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updated VIF entry in instance network info cache for port 8f09d5ee-2ec2-4438-98cc-36bdc79d150b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1154.777672] env[68674]: DEBUG nova.network.neutron [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updating instance_info_cache with network_info: [{"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "address": "fa:16:3e:ba:6d:d0", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": null, "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8f09d5ee-2e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.116257] env[68674]: DEBUG oslo_concurrency.lockutils [None req-065a4633-6a84-41bc-9b45-7c4769f2a896 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.003s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.116667] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.438s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.116885] env[68674]: INFO nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] During sync_power_state the instance has a pending task (spawning). Skip. 
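The 'Updated VIF entry in instance network info cache for port …' / 'Updating instance_info_cache with network_info: […]' records above show the cache as a list of VIF dicts keyed by "id". A rough sketch of that update, assuming plain dicts; the helper name is hypothetical and this is not Nova's nova.network.neutron code:

```python
# Replace (or append) the cached VIF whose "id" matches the refreshed entry,
# as the network-changed event handling above does for port 8f09d5ee-2e...
from __future__ import annotations


def update_vif_entry(network_info: list[dict], refreshed_vif: dict) -> list[dict]:
    updated = False
    result = []
    for vif in network_info:
        if vif.get("id") == refreshed_vif.get("id"):
            result.append(refreshed_vif)
            updated = True
        else:
            result.append(vif)
    if not updated:
        result.append(refreshed_vif)
    return result


cache = [{"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "active": True}]
refreshed = {"id": "8f09d5ee-2ec2-4438-98cc-36bdc79d150b", "active": False,
             "type": "unbound", "devname": "tap8f09d5ee-2e"}
cache = update_vif_entry(cache, refreshed)
assert cache[0]["active"] is False
```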
[ 1155.117075] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.129930] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.539s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.130256] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.366s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.144051] env[68674]: DEBUG nova.scheduler.client.report [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.282166] env[68674]: DEBUG oslo_concurrency.lockutils [req-283d49a1-64e8-46e6-a89d-add9bddde1ea req-f7c5a1eb-acad-43cc-a703-d86763943c5a service nova] Releasing lock "refresh_cache-6cf18175-1436-4ba5-b4b3-8641ec6bdad1" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.364328] env[68674]: DEBUG nova.network.neutron [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.516965] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Completed reading data from the image iterator. {{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1155.517221] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd2ebc-a5c9-06ec-bfff-dd404c567337/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1155.518460] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3534035a-52ba-4682-bb59-2ffb954451d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.526023] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd2ebc-a5c9-06ec-bfff-dd404c567337/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1155.526023] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd2ebc-a5c9-06ec-bfff-dd404c567337/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1155.526191] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-107ee96d-4123-495a-bf61-b1532dc76718 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.594508] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.594811] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.595043] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.595234] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.595407] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.598060] env[68674]: INFO nova.compute.manager [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Terminating instance [ 1155.633923] env[68674]: INFO nova.compute.manager [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Detaching volume 7b439213-1295-4de8-b800-8ba487a72af9 [ 1155.650296] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.267s 
{{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.668145] env[68674]: INFO nova.virt.block_device [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Attempting to driver detach volume 7b439213-1295-4de8-b800-8ba487a72af9 from mountpoint /dev/sdb [ 1155.668385] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Volume detach. Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1155.668573] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647702', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'name': 'volume-7b439213-1295-4de8-b800-8ba487a72af9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5384c82b-a584-430f-8ef1-e2731562b5ff', 'attached_at': '', 'detached_at': '', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'serial': '7b439213-1295-4de8-b800-8ba487a72af9'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1155.669451] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4779c54d-6330-47f8-899c-869e03c19632 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.692785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d7035d-0762-40e3-9aba-6e31ee1d3b3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.699883] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b5d3a9-e377-4c73-b632-078b17f102f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.719688] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151ab2f4-a833-40d2-9d96-307146f9a08b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.733607] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] The volume has not been displaced from its original location: [datastore2] volume-7b439213-1295-4de8-b800-8ba487a72af9/volume-7b439213-1295-4de8-b800-8ba487a72af9.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1155.740429] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1155.740930] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d163905-91d7-46fe-bd41-c4b5078cff97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.759288] env[68674]: DEBUG oslo_vmware.api [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1155.759288] env[68674]: value = "task-3240948" [ 1155.759288] env[68674]: _type = "Task" [ 1155.759288] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.767980] env[68674]: DEBUG oslo_vmware.api [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240948, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.811625] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd2ebc-a5c9-06ec-bfff-dd404c567337/disk-0.vmdk. 
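The 'Reconfiguring VM instance instance-0000006f to detach disk 2001' / ReconfigVM_Task records above are a device-removal reconfigure. Nova drives this through oslo.vmware; an equivalent sketch in plain pyVmomi, shown only to illustrate the vSphere call and not Nova's volumeops code:

```python
from pyVmomi import vim


def detach_disk(vm, device_key):
    """Reconfigure `vm` (a vim.VirtualMachine) to remove disk `device_key`.

    No fileOperation is set, so the backing VMDK (the Cinder volume) is kept;
    only the virtual device is removed, matching the "detach disk 2001" step.
    """
    disk = next(dev for dev in vm.config.hardware.device
                if isinstance(dev, vim.vm.device.VirtualDisk)
                and dev.key == device_key)

    dev_spec = vim.vm.device.VirtualDeviceSpec()
    dev_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
    dev_spec.device = disk

    spec = vim.vm.ConfigSpec(deviceChange=[dev_spec])
    return vm.ReconfigVM_Task(spec=spec)  # poll the returned Task to completion
```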
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1155.811889] env[68674]: INFO nova.virt.vmwareapi.images [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Downloaded image file data 5bc5480f-c581-4561-8a80-b3996f994c28 [ 1155.812741] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a891c849-e464-4c29-950f-de63d1ce7e9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.828489] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81b4fa30-25ac-4780-ab12-43b1e43ad012 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.867242] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.889042] env[68674]: INFO nova.virt.vmwareapi.images [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] The imported VM was unregistered [ 1155.891184] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1155.891427] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Creating directory with path [datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1155.891758] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21c6d6da-41f8-4f04-a957-c0e3b72a8d66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.903398] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Created directory with path [datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1155.903398] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb/OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb.vmdk to [datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk. 
{{(pid=68674) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1155.903600] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-57832bf8-b4c5-4a3e-8720-5a5de5ab0a02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.911465] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1155.911465] env[68674]: value = "task-3240950" [ 1155.911465] env[68674]: _type = "Task" [ 1155.911465] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.919400] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240950, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.104029] env[68674]: DEBUG nova.compute.manager [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.104029] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.104029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81fa873a-a93f-4d5b-a710-28214bdcc763 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.113061] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.113061] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9da5eda-45d5-4b41-954d-381fcb029c69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.122026] env[68674]: DEBUG oslo_vmware.api [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1156.122026] env[68674]: value = "task-3240951" [ 1156.122026] env[68674]: _type = "Task" [ 1156.122026] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.128985] env[68674]: DEBUG nova.compute.manager [req-9c982d30-f123-45b7-a445-3b0764d28c4c req-525a923b-df79-4a12-bee4-2ad201191777 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-vif-unplugged-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.130024] env[68674]: DEBUG oslo_concurrency.lockutils [req-9c982d30-f123-45b7-a445-3b0764d28c4c req-525a923b-df79-4a12-bee4-2ad201191777 service nova] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.130024] env[68674]: DEBUG oslo_concurrency.lockutils [req-9c982d30-f123-45b7-a445-3b0764d28c4c req-525a923b-df79-4a12-bee4-2ad201191777 service nova] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.130024] env[68674]: DEBUG oslo_concurrency.lockutils [req-9c982d30-f123-45b7-a445-3b0764d28c4c req-525a923b-df79-4a12-bee4-2ad201191777 service nova] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.130024] env[68674]: DEBUG nova.compute.manager [req-9c982d30-f123-45b7-a445-3b0764d28c4c req-525a923b-df79-4a12-bee4-2ad201191777 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] No waiting events found dispatching network-vif-unplugged-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1156.130488] env[68674]: WARNING nova.compute.manager [req-9c982d30-f123-45b7-a445-3b0764d28c4c req-525a923b-df79-4a12-bee4-2ad201191777 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received unexpected event network-vif-unplugged-f751e885-e868-4e41-a9e7-de64b20c643c for instance with vm_state shelved and task_state shelving_offloading. [ 1156.136813] env[68674]: DEBUG oslo_vmware.api [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240951, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.162788] env[68674]: DEBUG oslo_concurrency.lockutils [None req-01d6c36a-9e86-4428-a5f1-1f440d454f47 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.779s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.162899] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.485s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.163025] env[68674]: INFO nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] During sync_power_state the instance has a pending task (shelving_image_uploading). Skip. [ 1156.163211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.163819] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.422s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.163901] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.164087] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.164271] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.166519] env[68674]: INFO nova.compute.manager [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb 
tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Terminating instance [ 1156.174594] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.175746] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575fcf39-027a-4f1f-b169-7f59249686db {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.184748] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.185046] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2f39196-738e-4e8d-ab2f-f7f3e721437e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.271115] env[68674]: DEBUG oslo_vmware.api [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240948, 'name': ReconfigVM_Task, 'duration_secs': 0.211944} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.271115] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1156.274460] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46615856-8bb2-4289-ac36-63e65f66326e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.290410] env[68674]: DEBUG oslo_vmware.api [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1156.290410] env[68674]: value = "task-3240953" [ 1156.290410] env[68674]: _type = "Task" [ 1156.290410] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.297285] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.297665] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.297665] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleting the datastore file [datastore1] 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.298961] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68a377e7-d003-4471-919f-7bfb664b2313 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.305254] env[68674]: DEBUG oslo_vmware.api [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240953, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.311429] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1156.311429] env[68674]: value = "task-3240954" [ 1156.311429] env[68674]: _type = "Task" [ 1156.311429] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.320540] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.422040] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240950, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.634279] env[68674]: DEBUG oslo_vmware.api [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240951, 'name': PowerOffVM_Task, 'duration_secs': 0.263987} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.634673] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.634884] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.635190] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c2c272e-b97f-4810-8e31-1a34ea6783ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.674364] env[68674]: DEBUG nova.compute.manager [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.674553] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.674871] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99064720-0e24-4e0e-846a-94049d575a32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.684623] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c10da4-705b-4c64-b86b-0a315d711bbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.717733] env[68674]: WARNING nova.virt.vmwareapi.vmops [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6cf18175-1436-4ba5-b4b3-8641ec6bdad1 could not be found. [ 1156.717788] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.717980] env[68674]: INFO nova.compute.manager [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Took 0.04 seconds to destroy the instance on the hypervisor. 
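The InstanceNotFound warning followed immediately by 'Instance destroyed' and 'Took 0.04 seconds to destroy the instance on the hypervisor' above shows the destroy path treating a missing backend VM as already gone. A compact sketch of that tolerance, with hypothetical names rather than the vmops code:

```python
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(instance_uuid, find_vm, unregister_and_delete):
    """Destroy the backend VM; a VM that is already gone is not an error."""
    try:
        vm_ref = find_vm(instance_uuid)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        return  # shelved-offloaded or already-cleaned instances land here
    unregister_and_delete(vm_ref)
```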
[ 1156.718304] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.718645] env[68674]: DEBUG nova.compute.manager [-] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.718743] env[68674]: DEBUG nova.network.neutron [-] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.800927] env[68674]: DEBUG oslo_vmware.api [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240953, 'name': ReconfigVM_Task, 'duration_secs': 0.176252} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.801341] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647702', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'name': 'volume-7b439213-1295-4de8-b800-8ba487a72af9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5384c82b-a584-430f-8ef1-e2731562b5ff', 'attached_at': '', 'detached_at': '', 'volume_id': '7b439213-1295-4de8-b800-8ba487a72af9', 'serial': '7b439213-1295-4de8-b800-8ba487a72af9'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1156.809356] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.809670] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.809916] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Deleting the datastore file [datastore2] 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.810323] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ccbaaf7-b06e-4a85-9d2f-27643f5188c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1156.821852] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.823657] env[68674]: DEBUG oslo_vmware.api [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1156.823657] env[68674]: value = "task-3240956" [ 1156.823657] env[68674]: _type = "Task" [ 1156.823657] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.831949] env[68674]: DEBUG oslo_vmware.api [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.923687] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240950, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.322982] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.333102] env[68674]: DEBUG oslo_vmware.api [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3240956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389641} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.333754] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.333754] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.333754] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.334030] env[68674]: INFO nova.compute.manager [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1157.335898] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.335898] env[68674]: DEBUG nova.compute.manager [-] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.335898] env[68674]: DEBUG nova.network.neutron [-] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1157.355391] env[68674]: DEBUG nova.objects.instance [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'flavor' on Instance uuid 5384c82b-a584-430f-8ef1-e2731562b5ff {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.423527] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240950, 'name': MoveVirtualDisk_Task} progress is 63%. 
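The recurring 'Waiting for the task: (returnval){ value = "task-…" } to complete' / 'progress is N%' / 'completed successfully' triplets throughout this section are produced by one polling loop. A generic sketch of that loop; `get_task_info` is an assumed accessor standing in for the oslo.vmware session call:

```python
import time


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it leaves the queued/running states.

    `get_task_info` is a hypothetical callable returning an object with
    `state` ('queued' | 'running' | 'success' | 'error'), `progress`,
    `result` and `error`, mirroring the TaskInfo fields reported above.
    """
    while True:
        info = get_task_info()
        if info.state in ("queued", "running"):
            # Matches the "Task: {...} progress is N%" debug lines above.
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            return info.result
        raise RuntimeError(info.error)  # task ended in 'error'
```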
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.480822] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.481464] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.481464] env[68674]: INFO nova.compute.manager [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Shelving [ 1157.482913] env[68674]: DEBUG nova.network.neutron [-] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.822655] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.926218] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240950, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.987511] env[68674]: INFO nova.compute.manager [-] [instance: 6cf18175-1436-4ba5-b4b3-8641ec6bdad1] Took 1.27 seconds to deallocate network for instance. [ 1158.162660] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-changed-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.162873] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Refreshing instance network info cache due to event network-changed-f751e885-e868-4e41-a9e7-de64b20c643c. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1158.163106] env[68674]: DEBUG oslo_concurrency.lockutils [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.163259] env[68674]: DEBUG oslo_concurrency.lockutils [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.163428] env[68674]: DEBUG nova.network.neutron [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Refreshing network info cache for port f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.323488] env[68674]: DEBUG oslo_vmware.api [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.845386} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.325080] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.325080] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1158.325080] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1158.345305] env[68674]: INFO nova.scheduler.client.report [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted allocations for instance 9b8aad00-0980-4752-954a-c09c9ae6f9ec [ 1158.364755] env[68674]: DEBUG oslo_concurrency.lockutils [None req-a7e97580-e7bb-4988-a4c7-01f30ee20d42 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.425898] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240950, 'name': 
MoveVirtualDisk_Task, 'duration_secs': 2.368408} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.426233] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb/OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb.vmdk to [datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk. [ 1158.426431] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Cleaning up location [datastore1] OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1158.426598] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_be2dbcc5-c3fe-4dc9-9908-264af173dcdb {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1158.426837] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d7602a9-3d5f-4487-a7d5-8df3a6f74858 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.433529] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1158.433529] env[68674]: value = "task-3240957" [ 1158.433529] env[68674]: _type = "Task" [ 1158.433529] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.440814] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240957, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.494030] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.494354] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aed4c263-b7e7-440a-91e5-2069e412ecc2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.503644] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1158.503644] env[68674]: value = "task-3240958" [ 1158.503644] env[68674]: _type = "Task" [ 1158.503644] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.515078] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.560026] env[68674]: DEBUG nova.network.neutron [-] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.850250] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.850533] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.850752] env[68674]: DEBUG nova.objects.instance [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'resources' on Instance uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.871164] env[68674]: DEBUG nova.network.neutron [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updated VIF entry in instance network info cache for port f751e885-e868-4e41-a9e7-de64b20c643c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1158.871523] env[68674]: DEBUG nova.network.neutron [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": null, "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf751e885-e8", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.946890] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240957, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03447} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.948560] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.948560] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.948560] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk to [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.948560] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-026ca082-466f-4c96-9c2d-c4836958f361 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.955062] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 
tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1158.955062] env[68674]: value = "task-3240959" [ 1158.955062] env[68674]: _type = "Task" [ 1158.955062] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.963302] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.016859] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240958, 'name': PowerOffVM_Task, 'duration_secs': 0.319365} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.017221] env[68674]: DEBUG oslo_concurrency.lockutils [None req-00e43eb0-92c1-4bd0-8f04-d9fda73618cb tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "6cf18175-1436-4ba5-b4b3-8641ec6bdad1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.854s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.018457] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.020258] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2aa45b-03f1-470d-8890-db1c1de53b05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.053148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.053148] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.053388] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "5384c82b-a584-430f-8ef1-e2731562b5ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.053608] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.053786] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.056215] env[68674]: INFO nova.compute.manager [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Terminating instance [ 1159.058807] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df20b6fb-b755-4a01-98f0-4faa496bbb65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.064069] env[68674]: INFO nova.compute.manager [-] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Took 1.73 seconds to deallocate network for instance. [ 1159.356157] env[68674]: DEBUG nova.objects.instance [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'numa_topology' on Instance uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.374491] env[68674]: DEBUG oslo_concurrency.lockutils [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.374858] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-vif-deleted-76984c11-ddde-4f48-8818-0911510f8452 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.375106] env[68674]: INFO nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Neutron deleted interface 76984c11-ddde-4f48-8818-0911510f8452; detaching it from the instance and deleting it from the info cache [ 1159.375425] env[68674]: DEBUG nova.network.neutron [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [{"id": "99e1922b-5bc3-4df6-aa48-4502164af67c", "address": "fa:16:3e:24:52:08", "network": {"id": "079ae866-5b90-4122-b326-43b9a901e04c", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-688371485", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99e1922b-5b", "ovs_interfaceid": "99e1922b-5bc3-4df6-aa48-4502164af67c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "address": "fa:16:3e:bf:b6:bf", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d39a196-83", "ovs_interfaceid": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.465335] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240959, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.565427] env[68674]: DEBUG nova.compute.manager [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1159.565643] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1159.566606] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cef308e-f163-4fd6-ae64-6379afd77a88 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.577177] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.578199] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.578444] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3daa6a3-e0ea-4811-be1e-ce501af2723b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.581048] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Creating Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1159.581326] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-19dad416-df88-4c8e-802f-1fe32740e6b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.588096] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1159.588096] env[68674]: value = "task-3240961" [ 1159.588096] env[68674]: _type = "Task" [ 1159.588096] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.589731] env[68674]: DEBUG oslo_vmware.api [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1159.589731] env[68674]: value = "task-3240960" [ 1159.589731] env[68674]: _type = "Task" [ 1159.589731] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.602328] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240961, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.606397] env[68674]: DEBUG oslo_vmware.api [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.613793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.613793] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.857861] env[68674]: DEBUG nova.objects.base [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Object Instance<9b8aad00-0980-4752-954a-c09c9ae6f9ec> lazy-loaded attributes: resources,numa_topology {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1159.884060] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-490c2780-f47c-442f-9592-899b093f7467 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.904549] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1b7653-6799-4d3f-ab29-f397c9256630 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.941301] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Detach interface failed, port_id=76984c11-ddde-4f48-8818-0911510f8452, reason: Instance 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1159.941555] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-vif-deleted-99e1922b-5bc3-4df6-aa48-4502164af67c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.941792] env[68674]: INFO nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Neutron deleted interface 99e1922b-5bc3-4df6-aa48-4502164af67c; detaching it from the instance and deleting it from the info cache [ 1159.942086] env[68674]: DEBUG nova.network.neutron [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [{"id": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "address": "fa:16:3e:bf:b6:bf", "network": {"id": "4b430261-417f-4868-a492-98b3b9f0467f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1859201300", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d39a196-83", "ovs_interfaceid": "1d39a196-8312-4fd9-b20f-b5ee8c06556b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.970532] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240959, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.065483] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704e46de-f30b-4729-8849-2467f8dd7399 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.073464] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b49a9e3-644d-4ba3-b04d-18410b24c8e4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.113882] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a80cd97-1879-43c3-bb04-244cb2beaaf5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.116888] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1160.124900] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240961, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.130213] env[68674]: DEBUG oslo_vmware.api [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240960, 'name': PowerOffVM_Task, 'duration_secs': 0.269116} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.130600] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.130828] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1160.132232] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98807e4f-2f66-4dd5-9d6d-87238b70daf6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.137035] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e4e067d-7d7c-4bac-b1a4-a4116b217852 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.151336] env[68674]: DEBUG nova.compute.provider_tree [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.214507] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1160.214785] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1160.215017] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleting the datastore file [datastore2] 5384c82b-a584-430f-8ef1-e2731562b5ff {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1160.215345] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6349355-f4f2-4e47-a3f0-0110b297b97f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.223265] env[68674]: DEBUG oslo_vmware.api [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1160.223265] env[68674]: value = "task-3240963" [ 1160.223265] env[68674]: _type = "Task" [ 1160.223265] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.232190] env[68674]: DEBUG oslo_vmware.api [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240963, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.395876] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.448381] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73bd730c-56ed-4217-9e4b-a2c9be01e13a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.459135] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cdb02e-45fe-4899-88eb-f97b425b123f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.484238] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240959, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.498317] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Detach interface failed, port_id=99e1922b-5bc3-4df6-aa48-4502164af67c, reason: Instance 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1160.498586] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Received event network-vif-deleted-1d39a196-8312-4fd9-b20f-b5ee8c06556b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1160.498770] env[68674]: INFO nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Neutron deleted interface 1d39a196-8312-4fd9-b20f-b5ee8c06556b; detaching it from the instance and deleting it from the info cache [ 1160.498956] env[68674]: DEBUG nova.network.neutron [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.617443] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240961, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.636780] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.654840] env[68674]: DEBUG nova.scheduler.client.report [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.734254] env[68674]: DEBUG oslo_vmware.api [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3240963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354006} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.734538] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.734701] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.734913] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.735121] env[68674]: INFO nova.compute.manager [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1160.735383] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.735617] env[68674]: DEBUG nova.compute.manager [-] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1160.735719] env[68674]: DEBUG nova.network.neutron [-] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1160.985876] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240959, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.001759] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a279f636-a8ee-47ab-82e6-96b0c3239675 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.010741] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba583e9a-ad01-40f5-995f-07fc43fd3c02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.042783] env[68674]: DEBUG nova.compute.manager [req-51d66ddb-984f-474e-9875-91a055fb7744 req-aa180a04-7f7a-423c-9ca3-08a217faf058 service nova] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Detach interface failed, port_id=1d39a196-8312-4fd9-b20f-b5ee8c06556b, reason: Instance 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1161.116849] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240961, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.160644] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.166891] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.585s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.166891] env[68674]: DEBUG nova.objects.instance [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lazy-loading 'resources' on Instance uuid 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.223239] env[68674]: DEBUG nova.compute.manager [req-f4aad9b1-93e8-4aa4-be08-58ff6b2d893a req-8458871c-011e-44c8-9981-cc890b1e7798 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Received event network-vif-deleted-f536bcae-200f-4668-94f8-520d08d06653 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.223464] env[68674]: INFO nova.compute.manager [req-f4aad9b1-93e8-4aa4-be08-58ff6b2d893a req-8458871c-011e-44c8-9981-cc890b1e7798 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Neutron deleted interface f536bcae-200f-4668-94f8-520d08d06653; detaching it from the instance and deleting it from the info cache [ 1161.223734] env[68674]: DEBUG nova.network.neutron [req-f4aad9b1-93e8-4aa4-be08-58ff6b2d893a req-8458871c-011e-44c8-9981-cc890b1e7798 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updating 
instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.486287] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240959, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.309855} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.486287] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/5bc5480f-c581-4561-8a80-b3996f994c28/5bc5480f-c581-4561-8a80-b3996f994c28.vmdk to [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1161.487017] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae9f900-500c-49e9-89a2-113977720127 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.511267] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1161.511611] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afee7207-3770-4bf4-a6ad-d57d45167a73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.531132] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1161.531132] env[68674]: value = "task-3240964" [ 1161.531132] env[68674]: _type = "Task" [ 1161.531132] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.539583] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240964, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.617953] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240961, 'name': CreateSnapshot_Task, 'duration_secs': 1.761874} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.618279] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Created Snapshot of the VM instance {{(pid=68674) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1161.619043] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcf45de-faa7-4563-9572-362c9d9360f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.670866] env[68674]: DEBUG nova.network.neutron [-] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.672407] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1fb583c8-3068-44e2-a1f8-b5b6c31b9660 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.351s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.673477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.997s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.673477] env[68674]: INFO nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] During sync_power_state the instance has a pending task (shelving_image_uploading). Skip. 
[ 1161.673627] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.674358] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.278s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.674358] env[68674]: INFO nova.compute.manager [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Unshelving [ 1161.733203] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83a57b34-5b9f-473a-a877-b5ef67c9426d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.743127] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee3f2fa-f5ad-415b-86fd-850f85493f1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.779763] env[68674]: DEBUG nova.compute.manager [req-f4aad9b1-93e8-4aa4-be08-58ff6b2d893a req-8458871c-011e-44c8-9981-cc890b1e7798 service nova] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Detach interface failed, port_id=f536bcae-200f-4668-94f8-520d08d06653, reason: Instance 5384c82b-a584-430f-8ef1-e2731562b5ff could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1161.854752] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9b65f8-fbc6-48ad-9b7b-830892861f94 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.861877] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e74b01-fd31-4b26-bb01-4cde839a8381 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.891341] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e5eae4-5757-4816-807c-f3adce3cf825 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.898162] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26393ea-d139-406e-aa47-f3c0bd768844 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.910784] env[68674]: DEBUG nova.compute.provider_tree [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.041392] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240964, 'name': ReconfigVM_Task, 'duration_secs': 0.286084} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.041680] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Reconfigured VM instance instance-0000006a to attach disk [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89/ba4bfbb4-a89b-4ab6-964e-792647fd5a89.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1162.042328] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa66fc86-fe3c-444a-b139-6553531e1846 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.048694] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1162.048694] env[68674]: value = "task-3240965" [ 1162.048694] env[68674]: _type = "Task" [ 1162.048694] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.055964] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240965, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.136750] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Creating linked-clone VM from snapshot {{(pid=68674) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1162.137181] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c97b380d-4b75-4448-b77b-f228f91ed648 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.145153] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1162.145153] env[68674]: value = "task-3240966" [ 1162.145153] env[68674]: _type = "Task" [ 1162.145153] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.153352] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240966, 'name': CloneVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.173068] env[68674]: INFO nova.compute.manager [-] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Took 1.44 seconds to deallocate network for instance. [ 1162.414363] env[68674]: DEBUG nova.scheduler.client.report [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1162.558421] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240965, 'name': Rename_Task, 'duration_secs': 0.157398} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.558787] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1162.558916] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41cdc9d2-1070-4458-b7f8-d9c7f38505bc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.565158] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1162.565158] env[68674]: value = "task-3240967" [ 1162.565158] env[68674]: _type = "Task" [ 1162.565158] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.572817] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.655215] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240966, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.680587] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.696311] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.904406] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.919537] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.921980] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.285s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.923605] env[68674]: INFO nova.compute.claims [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1162.940168] env[68674]: INFO nova.scheduler.client.report [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Deleted allocations for instance 2efe81dd-caa3-4fde-8be0-fbf399ce99e0 [ 1163.074983] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240967, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.157327] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240966, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.448735] env[68674]: DEBUG oslo_concurrency.lockutils [None req-35e9d649-89dc-45c8-9ac6-9d783fc78aef tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "2efe81dd-caa3-4fde-8be0-fbf399ce99e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.854s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.576755] env[68674]: DEBUG oslo_vmware.api [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3240967, 'name': PowerOnVM_Task, 'duration_secs': 0.955423} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.577056] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1163.657410] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240966, 'name': CloneVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.692908] env[68674]: DEBUG nova.compute.manager [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1163.693942] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b9becc-2165-42e5-bd7a-f0c66ddcd02b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.071013] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca02292-32a6-4b30-b3c5-ae7b5db1f4b3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.079229] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74f62e0-8efc-46d2-9ff2-80e96194b430 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.113553] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191030ca-cfc7-4c93-a043-8d2043cefc0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.123268] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7f7e2b-e9b1-4fb8-9a65-6ee1ceaf4011 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.137223] env[68674]: DEBUG nova.compute.provider_tree [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 
tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.160080] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240966, 'name': CloneVM_Task, 'duration_secs': 1.565426} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.160080] env[68674]: INFO nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Created linked-clone VM from snapshot [ 1164.160080] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef8b8a7-3cc3-4e3e-9e24-606b47db9715 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.167990] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Uploading image 62894887-4050-4053-af59-8bdae4e2d98c {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1164.192113] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1164.192113] env[68674]: value = "vm-647709" [ 1164.192113] env[68674]: _type = "VirtualMachine" [ 1164.192113] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1164.192113] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c5335aa1-7f6e-4c07-af72-98e77b3aac7c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.203464] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease: (returnval){ [ 1164.203464] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2d0c9-4587-82da-1582-f0c704534a32" [ 1164.203464] env[68674]: _type = "HttpNfcLease" [ 1164.203464] env[68674]: } obtained for exporting VM: (result){ [ 1164.203464] env[68674]: value = "vm-647709" [ 1164.203464] env[68674]: _type = "VirtualMachine" [ 1164.203464] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1164.203464] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the lease: (returnval){ [ 1164.203464] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2d0c9-4587-82da-1582-f0c704534a32" [ 1164.203464] env[68674]: _type = "HttpNfcLease" [ 1164.203464] env[68674]: } to be ready. 
{{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1164.215486] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1164.215486] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2d0c9-4587-82da-1582-f0c704534a32" [ 1164.215486] env[68674]: _type = "HttpNfcLease" [ 1164.215486] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1164.215486] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4c07f81d-e790-4dda-b555-5f19ca9fa6cd tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.846s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.215962] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.540s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.217041] env[68674]: INFO nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] During sync_power_state the instance has a pending task (spawning). Skip. [ 1164.217041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.366196] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.366327] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.641378] env[68674]: DEBUG nova.scheduler.client.report [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1164.711365] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1164.711365] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2d0c9-4587-82da-1582-f0c704534a32" [ 1164.711365] 
env[68674]: _type = "HttpNfcLease" [ 1164.711365] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1164.711665] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1164.711665] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c2d0c9-4587-82da-1582-f0c704534a32" [ 1164.711665] env[68674]: _type = "HttpNfcLease" [ 1164.711665] env[68674]: }. {{(pid=68674) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1164.712393] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17fa8f0-e59a-4d82-bf4c-44ac7bb70cae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.720102] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52806eb7-b044-f504-890a-941e6a408f64/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1164.720328] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52806eb7-b044-f504-890a-941e6a408f64/disk-0.vmdk for reading. {{(pid=68674) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1164.869913] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c527fddb-43cc-439f-be32-50ff0e21be06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.146888] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.147775] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1165.150219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.470s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.150987] env[68674]: DEBUG nova.objects.instance [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'resources' on Instance uuid 5384c82b-a584-430f-8ef1-e2731562b5ff {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.365967] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.366161] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1165.654132] env[68674]: DEBUG nova.compute.utils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1165.658540] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1165.658831] env[68674]: DEBUG nova.network.neutron [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1165.700261] env[68674]: DEBUG nova.policy [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28e9b76e01f463bbb375cbd9c51684f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81afe76c94de4e94b53f15af0ef95e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1165.823230] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24f36d8-0d4c-4983-a4f7-3fd31975af51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.832247] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93efa860-db19-4256-8d91-f3af0c0554ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.866519] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a7053d-4a7a-4b48-8fcd-a3e311e67703 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.875386] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd22571-4592-437f-ae6f-80dcdb6c2cb9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.891827] env[68674]: DEBUG nova.compute.provider_tree [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.998998] env[68674]: DEBUG nova.network.neutron [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Successfully created port: c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1166.162358] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1166.228199] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "0cbfda3e-337f-41f6-add2-1dcd725b0953" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.230968] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.366181] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.395879] env[68674]: DEBUG nova.scheduler.client.report [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.516449] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.516690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.516890] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.517097] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e 
tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.517276] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.519732] env[68674]: INFO nova.compute.manager [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Terminating instance [ 1166.734577] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1166.900199] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.902811] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.207s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.903073] env[68674]: DEBUG nova.objects.instance [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'pci_requests' on Instance uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.927651] env[68674]: INFO nova.scheduler.client.report [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted allocations for instance 5384c82b-a584-430f-8ef1-e2731562b5ff [ 1167.024389] env[68674]: DEBUG nova.compute.manager [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1167.024649] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.025595] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0e9bff-ff64-48e7-8bcf-9d177a2e290f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.034519] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.034775] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ee0f4db-fe37-4d54-b253-78fd0c551946 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.042557] env[68674]: DEBUG oslo_vmware.api [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1167.042557] env[68674]: value = "task-3240969" [ 1167.042557] env[68674]: _type = "Task" [ 1167.042557] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.050981] env[68674]: DEBUG oslo_vmware.api [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240969, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.179036] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1167.207850] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.208284] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.208538] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.208817] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.209082] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.209330] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.209654] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.209907] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.210204] env[68674]: DEBUG nova.virt.hardware [None 
req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.210481] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.210770] env[68674]: DEBUG nova.virt.hardware [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.211820] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c0c045-3b09-4582-8c30-42f103b20dcf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.222549] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34f8907-8d2c-40e3-b9b1-b280e2509884 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.265519] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.361405] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.366181] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.407459] env[68674]: DEBUG nova.objects.instance [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'numa_topology' on Instance uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1167.437612] env[68674]: DEBUG oslo_concurrency.lockutils [None req-885a278d-72a0-4628-bbb5-b5f2e1aa6209 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "5384c82b-a584-430f-8ef1-e2731562b5ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.384s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.553134] env[68674]: DEBUG nova.compute.manager [req-7eb72305-2252-4fb3-81a5-cbc633188c57 req-5eb666fc-de7f-4602-91ba-b124301dc11c service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Received event 
network-vif-plugged-c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1167.553362] env[68674]: DEBUG oslo_concurrency.lockutils [req-7eb72305-2252-4fb3-81a5-cbc633188c57 req-5eb666fc-de7f-4602-91ba-b124301dc11c service nova] Acquiring lock "5fa43d94-64af-4cd2-9976-ca9cd994447e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.553653] env[68674]: DEBUG oslo_concurrency.lockutils [req-7eb72305-2252-4fb3-81a5-cbc633188c57 req-5eb666fc-de7f-4602-91ba-b124301dc11c service nova] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.553786] env[68674]: DEBUG oslo_concurrency.lockutils [req-7eb72305-2252-4fb3-81a5-cbc633188c57 req-5eb666fc-de7f-4602-91ba-b124301dc11c service nova] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.553919] env[68674]: DEBUG nova.compute.manager [req-7eb72305-2252-4fb3-81a5-cbc633188c57 req-5eb666fc-de7f-4602-91ba-b124301dc11c service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] No waiting events found dispatching network-vif-plugged-c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1167.554105] env[68674]: WARNING nova.compute.manager [req-7eb72305-2252-4fb3-81a5-cbc633188c57 req-5eb666fc-de7f-4602-91ba-b124301dc11c service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Received unexpected event network-vif-plugged-c7865d2c-4c30-4dd4-b925-759844ba4a80 for instance with vm_state building and task_state spawning. [ 1167.558159] env[68674]: DEBUG oslo_vmware.api [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240969, 'name': PowerOffVM_Task, 'duration_secs': 0.251428} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.558523] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.558598] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.558843] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46ed84ba-8a3f-47bd-83da-6188beb2ef03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.637680] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.637998] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.638253] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Deleting the datastore file [datastore2] 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.638538] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edfbfcfb-ef11-404d-9902-0644b3764db6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.649405] env[68674]: DEBUG oslo_vmware.api [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1167.649405] env[68674]: value = "task-3240971" [ 1167.649405] env[68674]: _type = "Task" [ 1167.649405] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.666022] env[68674]: DEBUG nova.network.neutron [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Successfully updated port: c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1167.667364] env[68674]: DEBUG oslo_vmware.api [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.914395] env[68674]: INFO nova.compute.claims [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1168.163978] env[68674]: DEBUG oslo_vmware.api [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3240971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175037} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.165160] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.165324] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.165732] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.165965] env[68674]: INFO nova.compute.manager [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1168.166190] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1168.166387] env[68674]: DEBUG nova.compute.manager [-] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1168.166486] env[68674]: DEBUG nova.network.neutron [-] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1168.169865] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-5fa43d94-64af-4cd2-9976-ca9cd994447e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.169979] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-5fa43d94-64af-4cd2-9976-ca9cd994447e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.170128] env[68674]: DEBUG nova.network.neutron [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.714730] env[68674]: DEBUG nova.network.neutron [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1168.919915] env[68674]: DEBUG nova.network.neutron [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Updating instance_info_cache with network_info: [{"id": "c7865d2c-4c30-4dd4-b925-759844ba4a80", "address": "fa:16:3e:f8:a0:d5", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7865d2c-4c", "ovs_interfaceid": "c7865d2c-4c30-4dd4-b925-759844ba4a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.100193] env[68674]: DEBUG nova.network.neutron [-] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.125815] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df45de95-7675-4383-810a-1ede3670fcde {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.138182] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba4a6a5-9723-4d6d-a39a-267a0882e893 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.172925] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047e4761-710a-4691-9210-60a4b4d98bed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.181495] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b30f3cd-9cdd-4028-a432-e0b49d6d41b6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.202485] env[68674]: DEBUG nova.compute.provider_tree [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.365857] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task 
ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.422658] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-5fa43d94-64af-4cd2-9976-ca9cd994447e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.423068] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Instance network_info: |[{"id": "c7865d2c-4c30-4dd4-b925-759844ba4a80", "address": "fa:16:3e:f8:a0:d5", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7865d2c-4c", "ovs_interfaceid": "c7865d2c-4c30-4dd4-b925-759844ba4a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1169.423564] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:a0:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7865d2c-4c30-4dd4-b925-759844ba4a80', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.431321] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.431560] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.431802] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96805292-d567-49c7-891a-35034be64af9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.455121] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.455121] env[68674]: value = "task-3240972" [ 1169.455121] env[68674]: _type = "Task" [ 1169.455121] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.463046] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240972, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.585464] env[68674]: DEBUG nova.compute.manager [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Received event network-changed-c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1169.585756] env[68674]: DEBUG nova.compute.manager [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Refreshing instance network info cache due to event network-changed-c7865d2c-4c30-4dd4-b925-759844ba4a80. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1169.586081] env[68674]: DEBUG oslo_concurrency.lockutils [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] Acquiring lock "refresh_cache-5fa43d94-64af-4cd2-9976-ca9cd994447e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.586263] env[68674]: DEBUG oslo_concurrency.lockutils [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] Acquired lock "refresh_cache-5fa43d94-64af-4cd2-9976-ca9cd994447e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.586447] env[68674]: DEBUG nova.network.neutron [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Refreshing network info cache for port c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1169.606278] env[68674]: INFO nova.compute.manager [-] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Took 1.44 seconds to deallocate network for instance. 
[ 1169.647657] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.647903] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.707294] env[68674]: DEBUG nova.scheduler.client.report [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.868952] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.965023] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240972, 'name': CreateVM_Task, 'duration_secs': 0.341124} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.965023] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1169.971924] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.972150] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.972830] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1169.972830] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63aa9df7-2da8-444a-8132-105defcaee3f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.977182] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1169.977182] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52718941-aa6b-49c7-577c-827c4a8e802d" [ 1169.977182] env[68674]: _type = "Task" [ 1169.977182] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.986675] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52718941-aa6b-49c7-577c-827c4a8e802d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.113645] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.150568] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1170.211540] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.309s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.213477] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.948s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.215414] env[68674]: INFO nova.compute.claims [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.268662] env[68674]: INFO nova.network.neutron [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating port f751e885-e868-4e41-a9e7-de64b20c643c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1170.308359] env[68674]: DEBUG nova.network.neutron [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Updated VIF entry in instance network info cache for port c7865d2c-4c30-4dd4-b925-759844ba4a80. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1170.308767] env[68674]: DEBUG nova.network.neutron [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Updating instance_info_cache with network_info: [{"id": "c7865d2c-4c30-4dd4-b925-759844ba4a80", "address": "fa:16:3e:f8:a0:d5", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7865d2c-4c", "ovs_interfaceid": "c7865d2c-4c30-4dd4-b925-759844ba4a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.488339] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52718941-aa6b-49c7-577c-827c4a8e802d, 'name': SearchDatastore_Task, 'duration_secs': 0.009763} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.488686] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.488951] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.489221] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.489402] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.489612] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.489904] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13c4d19a-e147-47a4-ba03-884f791e3abe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.498185] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.498443] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1170.499187] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a40501d5-0b03-4d61-92d0-552c7f31c2c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.504354] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1170.504354] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c709da-8fe0-3cac-a2be-747799814bb9" [ 1170.504354] env[68674]: _type = "Task" [ 1170.504354] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.512515] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c709da-8fe0-3cac-a2be-747799814bb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.674265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.811575] env[68674]: DEBUG oslo_concurrency.lockutils [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] Releasing lock "refresh_cache-5fa43d94-64af-4cd2-9976-ca9cd994447e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.811841] env[68674]: DEBUG nova.compute.manager [req-f0c867ab-d0ac-44ab-a13b-61e86cc4eee6 req-da4d226c-0ff5-4967-8989-cca9ce5b582b service nova] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Received event network-vif-deleted-4d94c698-e74c-4238-8f2e-ead75015687e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1171.015408] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52c709da-8fe0-3cac-a2be-747799814bb9, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.016405] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57aa5f26-3b74-4f31-8cbc-b55940071aee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.022415] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1171.022415] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526c2092-e1cd-321e-c310-9c8bc552086b" [ 1171.022415] env[68674]: _type = "Task" [ 1171.022415] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.030699] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526c2092-e1cd-321e-c310-9c8bc552086b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.380800] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fe9c89-f669-4238-8bb4-082a370c82ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.388710] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d8a995-e532-4549-9737-14063f0360eb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.420256] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a5e72a-67fd-44b4-a7c2-191f4029d1ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.427976] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66632626-2fe8-4ba0-bc77-36bf5b19e947 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.441713] env[68674]: DEBUG nova.compute.provider_tree [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.532793] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]526c2092-e1cd-321e-c310-9c8bc552086b, 'name': SearchDatastore_Task, 'duration_secs': 0.01033} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.533174] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.533427] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5fa43d94-64af-4cd2-9976-ca9cd994447e/5fa43d94-64af-4cd2-9976-ca9cd994447e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.533687] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be93aae9-f18a-4a1f-8cbc-c968cf9d6988 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.540221] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1171.540221] env[68674]: value = "task-3240973" [ 1171.540221] env[68674]: _type = "Task" [ 1171.540221] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.549261] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240973, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.635688] env[68674]: DEBUG nova.compute.manager [req-4c47ccb0-92ce-4c5e-bbdb-c9a442ce4765 req-8aafab80-1939-41e4-9257-a9e4da7da378 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-vif-plugged-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1171.635845] env[68674]: DEBUG oslo_concurrency.lockutils [req-4c47ccb0-92ce-4c5e-bbdb-c9a442ce4765 req-8aafab80-1939-41e4-9257-a9e4da7da378 service nova] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.636111] env[68674]: DEBUG oslo_concurrency.lockutils [req-4c47ccb0-92ce-4c5e-bbdb-c9a442ce4765 req-8aafab80-1939-41e4-9257-a9e4da7da378 service nova] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.636292] env[68674]: DEBUG oslo_concurrency.lockutils [req-4c47ccb0-92ce-4c5e-bbdb-c9a442ce4765 req-8aafab80-1939-41e4-9257-a9e4da7da378 service nova] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.636481] env[68674]: DEBUG nova.compute.manager [req-4c47ccb0-92ce-4c5e-bbdb-c9a442ce4765 req-8aafab80-1939-41e4-9257-a9e4da7da378 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] No waiting events found dispatching network-vif-plugged-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1171.636664] env[68674]: WARNING nova.compute.manager [req-4c47ccb0-92ce-4c5e-bbdb-c9a442ce4765 req-8aafab80-1939-41e4-9257-a9e4da7da378 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received unexpected event network-vif-plugged-f751e885-e868-4e41-a9e7-de64b20c643c for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1171.772440] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.772761] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.773084] env[68674]: DEBUG nova.network.neutron [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1171.945130] env[68674]: DEBUG nova.scheduler.client.report [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.052044] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464698} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.052371] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 5fa43d94-64af-4cd2-9976-ca9cd994447e/5fa43d94-64af-4cd2-9976-ca9cd994447e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1172.052491] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1172.052746] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a97dec89-c13f-4f08-b3b9-10b308f64fb4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.059035] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1172.059035] env[68674]: value = "task-3240974" [ 1172.059035] env[68674]: _type = "Task" [ 1172.059035] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.069069] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240974, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.450795] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.451360] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Start building networks asynchronously for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1172.453950] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.585s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.454144] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.454298] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1172.454593] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.341s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.454795] env[68674]: DEBUG nova.objects.instance [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'resources' on Instance uuid 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.457153] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9dfc0e-58f4-4974-81eb-0d1a27f77f85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.462489] env[68674]: DEBUG nova.network.neutron [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.467753] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12194c6a-4e66-46f4-bac3-9376db6b3076 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.482084] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3559ce-9de7-4ebf-aac2-5e67e9abc042 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.489762] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46ddfdd-d68d-4684-9d83-162ee0960036 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.521453] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179456MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1172.521616] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.570173] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078964} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.570449] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1172.571214] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d048192b-9786-4ae8-8e06-0ca67390a2d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.593285] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 5fa43d94-64af-4cd2-9976-ca9cd994447e/5fa43d94-64af-4cd2-9976-ca9cd994447e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.593538] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-198cb720-f5a6-48ff-890f-db4136ab6173 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.612146] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1172.612146] env[68674]: value = "task-3240975" [ 1172.612146] env[68674]: _type = "Task" [ 1172.612146] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.620058] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240975, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.964663] env[68674]: DEBUG nova.compute.utils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1172.966398] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.968833] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1172.969012] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1172.997556] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='86aefc8768724e478bf7c62a85b00e40',container_format='bare',created_at=2025-04-03T08:16:39Z,direct_url=,disk_format='vmdk',id=4567b890-657d-4354-bb3d-083e8acf9038,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-938117740-shelved',owner='fcfc3ecd6aa74705aefa88d7a95361a0',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-04-03T08:16:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1172.998048] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1172.998327] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1172.998476] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1172.998680] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1172.998856] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1172.999133] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1172.999347] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1172.999559] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1172.999753] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1172.999950] env[68674]: DEBUG nova.virt.hardware [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1173.001161] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2435291-bda2-4def-a2d4-25519f3a2625 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.010358] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc579b4e-1ecf-46c5-b48f-a6954bcec42d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.018011] env[68674]: DEBUG nova.policy [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc1277a660b040b08b95d61e03bbb65f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e73e759715a4e39a03bd234d918b2fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1173.029773] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:db:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f751e885-e868-4e41-a9e7-de64b20c643c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.037104] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 
tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.039796] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.040164] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d4b8c46-4f27-47c9-9a31-d33dd2d60ebe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.062211] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.062211] env[68674]: value = "task-3240976" [ 1173.062211] env[68674]: _type = "Task" [ 1173.062211] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.071560] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240976, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.125894] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240975, 'name': ReconfigVM_Task, 'duration_secs': 0.279896} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.126666] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 5fa43d94-64af-4cd2-9976-ca9cd994447e/5fa43d94-64af-4cd2-9976-ca9cd994447e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.127511] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-718bf27f-3b9c-4b0d-bfef-d3a74609939e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.138951] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1173.138951] env[68674]: value = "task-3240977" [ 1173.138951] env[68674]: _type = "Task" [ 1173.138951] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.148453] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240977, 'name': Rename_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.162340] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ee6087-8f49-4cac-9538-5359e3125033 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.171428] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7714be-7a70-4897-af39-d6023b4c4343 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.202033] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d792c02-00a8-4ba6-a973-7f9a63b03a9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.210620] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f9a4ef-edd8-4aae-a49c-0783eea504db {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.225035] env[68674]: DEBUG nova.compute.provider_tree [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.325375] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Successfully created port: f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.472237] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1173.573859] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240976, 'name': CreateVM_Task, 'duration_secs': 0.395189} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.574076] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1173.574771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.574942] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.575366] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1173.575648] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b5fa765-3e98-4f58-8d41-db6b76102823 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.581218] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1173.581218] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ab937-ed06-f1ee-8189-d08e2ff1c60c" [ 1173.581218] env[68674]: _type = "Task" [ 1173.581218] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.585164] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Successfully created port: 2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.592026] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ab937-ed06-f1ee-8189-d08e2ff1c60c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.649932] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240977, 'name': Rename_Task, 'duration_secs': 0.151928} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.650271] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1173.650541] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e6c3750-d944-458c-a066-99eab503e6e7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.659711] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1173.659711] env[68674]: value = "task-3240978" [ 1173.659711] env[68674]: _type = "Task" [ 1173.659711] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.668151] env[68674]: DEBUG nova.compute.manager [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-changed-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1173.668151] env[68674]: DEBUG nova.compute.manager [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Refreshing instance network info cache due to event network-changed-f751e885-e868-4e41-a9e7-de64b20c643c. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1173.668151] env[68674]: DEBUG oslo_concurrency.lockutils [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.668151] env[68674]: DEBUG oslo_concurrency.lockutils [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.668151] env[68674]: DEBUG nova.network.neutron [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Refreshing network info cache for port f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1173.672851] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.730264] env[68674]: DEBUG nova.scheduler.client.report [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.096295] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.096620] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Processing image 4567b890-657d-4354-bb3d-083e8acf9038 {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1174.096908] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.097167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.100438] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1174.100438] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8da2bf3-37d5-431e-8649-9031cbfeb3ee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.118107] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1174.118410] env[68674]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1174.119583] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d97cc6b-90ce-4291-a9e8-5b2219e6d2f3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.126154] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1174.126154] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5255f1df-f7ae-13e1-e357-f622d01aacb8" [ 1174.126154] env[68674]: _type = "Task" [ 1174.126154] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.134076] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5255f1df-f7ae-13e1-e357-f622d01aacb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.164021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.164021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.164021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.164021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.164021] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 
tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.165476] env[68674]: INFO nova.compute.manager [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Terminating instance [ 1174.182304] env[68674]: DEBUG oslo_vmware.api [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240978, 'name': PowerOnVM_Task, 'duration_secs': 0.475018} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.184122] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.186041] env[68674]: INFO nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Took 7.01 seconds to spawn the instance on the hypervisor. [ 1174.186041] env[68674]: DEBUG nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1174.186041] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3eaea9-1b48-489f-88e9-bcc8297853d8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.223987] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52806eb7-b044-f504-890a-941e6a408f64/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1174.225134] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8852e1-44f5-4982-bed3-8e1e12ce4dcc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.234021] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52806eb7-b044-f504-890a-941e6a408f64/disk-0.vmdk is in state: ready. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1174.234021] env[68674]: ERROR oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52806eb7-b044-f504-890a-941e6a408f64/disk-0.vmdk due to incomplete transfer. [ 1174.234021] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ee9e6105-3984-40bf-a686-367f2c3700ba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.236123] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.781s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.238977] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.565s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.241186] env[68674]: INFO nova.compute.claims [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1174.246147] env[68674]: DEBUG oslo_vmware.rw_handles [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52806eb7-b044-f504-890a-941e6a408f64/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1174.246508] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Uploaded image 62894887-4050-4053-af59-8bdae4e2d98c to the Glance image server {{(pid=68674) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1174.249715] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Destroying the VM {{(pid=68674) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1174.249715] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1759d03b-1449-4aba-a10a-e43b6f873653 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.258266] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1174.258266] env[68674]: value = "task-3240979" [ 1174.258266] env[68674]: _type = "Task" [ 1174.258266] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.261847] env[68674]: INFO nova.scheduler.client.report [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Deleted allocations for instance 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a [ 1174.275213] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240979, 'name': Destroy_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.482064] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1174.508657] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1174.508908] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.509080] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1174.509266] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.509414] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1174.509563] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1174.509809] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1174.509989] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1174.510202] env[68674]: DEBUG nova.virt.hardware [None 
req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1174.510369] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1174.510558] env[68674]: DEBUG nova.virt.hardware [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1174.511462] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0a8676-b1d5-484f-8bc1-273ee3fc0170 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.519910] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e00aab-fc0f-4fad-a51d-5af2d7078596 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.565473] env[68674]: DEBUG nova.network.neutron [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updated VIF entry in instance network info cache for port f751e885-e868-4e41-a9e7-de64b20c643c. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1174.565903] env[68674]: DEBUG nova.network.neutron [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.637689] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1174.638094] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Fetch image to [datastore2] OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d/OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1174.638453] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Downloading stream optimized image 4567b890-657d-4354-bb3d-083e8acf9038 to [datastore2] OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d/OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d.vmdk on the data store datastore2 as vApp {{(pid=68674) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1174.638744] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Downloading image file data 4567b890-657d-4354-bb3d-083e8acf9038 to the ESX as VM named 'OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d' {{(pid=68674) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1174.681565] env[68674]: DEBUG 
nova.compute.manager [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1174.681812] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.682662] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-431656a5-b15d-4517-b529-d9af07182650 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.689669] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1174.689669] env[68674]: value = "task-3240980" [ 1174.689669] env[68674]: _type = "Task" [ 1174.689669] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.697703] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240980, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.710903] env[68674]: INFO nova.compute.manager [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Took 14.09 seconds to build instance. [ 1174.720760] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1174.720760] env[68674]: value = "resgroup-9" [ 1174.720760] env[68674]: _type = "ResourcePool" [ 1174.720760] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1174.721035] env[68674]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a9fe8b57-dd01-468b-b92b-861fbc2fc56c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.742652] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lease: (returnval){ [ 1174.742652] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52496bb8-20f6-1ba0-67bc-c46871ab7736" [ 1174.742652] env[68674]: _type = "HttpNfcLease" [ 1174.742652] env[68674]: } obtained for vApp import into resource pool (val){ [ 1174.742652] env[68674]: value = "resgroup-9" [ 1174.742652] env[68674]: _type = "ResourcePool" [ 1174.742652] env[68674]: }. 
{{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1174.742972] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the lease: (returnval){ [ 1174.742972] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52496bb8-20f6-1ba0-67bc-c46871ab7736" [ 1174.742972] env[68674]: _type = "HttpNfcLease" [ 1174.742972] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1174.749238] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1174.749238] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52496bb8-20f6-1ba0-67bc-c46871ab7736" [ 1174.749238] env[68674]: _type = "HttpNfcLease" [ 1174.749238] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1174.766124] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240979, 'name': Destroy_Task, 'duration_secs': 0.350819} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.766966] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Destroyed the VM [ 1174.767244] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deleting Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1174.767487] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-36d1932f-0f2f-4d7d-bf0a-9b8b7afb1a39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.774376] env[68674]: DEBUG oslo_concurrency.lockutils [None req-96646f22-75c5-4c62-88fb-743a5165286e tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "142e8ede-90e2-47cf-a1b1-8c4fd59eed0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.258s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.775982] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1174.775982] env[68674]: value = "task-3240982" [ 1174.775982] env[68674]: _type = "Task" [ 1174.775982] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.784276] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240982, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.947219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.068683] env[68674]: DEBUG oslo_concurrency.lockutils [req-fa1486a9-b1d9-4bb7-8777-b4ae32b48042 req-6e8b9fe8-26d6-4b45-a984-9bc00353f1a5 service nova] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.201338] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240980, 'name': PowerOffVM_Task, 'duration_secs': 0.201503} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.201338] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1175.201338] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1175.201338] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647686', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'name': 'volume-ac75a73a-75e6-493d-9caf-c35a3091e391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c876b288-de2a-4195-bfef-88f38e219d9a', 'attached_at': '2025-04-03T08:16:36.000000', 'detached_at': '', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'serial': 'ac75a73a-75e6-493d-9caf-c35a3091e391'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1175.201338] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477964cf-0601-4cf3-82be-e77642742034 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.225068] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ddfbec1b-350c-4383-81a4-6edf8f5deff5 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.613s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.225457] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.278s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.225642] env[68674]: DEBUG nova.compute.manager [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.226554] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d716ec4-6b95-4c53-91f9-c89f18a78d2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.230613] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439f48c4-d93e-437c-8729-1e93eb05a0ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.237582] env[68674]: DEBUG nova.compute.manager [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 1175.238177] env[68674]: DEBUG nova.objects.instance [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'flavor' on Instance uuid 5fa43d94-64af-4cd2-9976-ca9cd994447e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1175.249978] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9b49f9-dadc-4012-aac4-08d5c0d1fa06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.273955] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1175.273955] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52496bb8-20f6-1ba0-67bc-c46871ab7736" [ 1175.273955] env[68674]: _type = "HttpNfcLease" [ 1175.273955] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1175.274719] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92b4e6a-e940-4c30-9a95-9167d3f23f95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.292670] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] The volume has not been displaced from its original location: [datastore1] volume-ac75a73a-75e6-493d-9caf-c35a3091e391/volume-ac75a73a-75e6-493d-9caf-c35a3091e391.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1175.298021] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1175.305262] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-289f1485-c761-41d0-9125-ceff635e59a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.314663] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240982, 'name': RemoveSnapshot_Task, 'duration_secs': 0.408825} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.317371] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deleted Snapshot of the VM instance {{(pid=68674) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1175.317647] env[68674]: DEBUG nova.compute.manager [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.319032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79ae830-f5c8-423b-bc73-1c80c726ebee {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.324366] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1175.324366] env[68674]: value = "task-3240983" [ 1175.324366] env[68674]: _type = "Task" [ 1175.324366] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.340488] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240983, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.432146] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Successfully updated port: f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1175.474986] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c88e1f-2386-4d9a-8a00-619e75ce4c71 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.482441] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14d868f-80ff-48ad-91de-b69c4b493a43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.516127] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c07bee8-9795-42f8-bf73-359bd05c1d85 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.524215] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fe56e3-ed5c-4411-a41a-70acfa7903bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.537748] env[68674]: DEBUG nova.compute.provider_tree [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.698359] env[68674]: DEBUG nova.compute.manager [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received event network-vif-plugged-f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.698574] env[68674]: DEBUG oslo_concurrency.lockutils [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] Acquiring lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.698779] env[68674]: DEBUG oslo_concurrency.lockutils [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.698947] env[68674]: DEBUG oslo_concurrency.lockutils [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.699491] 
env[68674]: DEBUG nova.compute.manager [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] No waiting events found dispatching network-vif-plugged-f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1175.699696] env[68674]: WARNING nova.compute.manager [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received unexpected event network-vif-plugged-f90355a4-7ff9-447e-8dad-d7b710e9c578 for instance with vm_state building and task_state spawning. [ 1175.699865] env[68674]: DEBUG nova.compute.manager [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received event network-changed-f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.700034] env[68674]: DEBUG nova.compute.manager [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Refreshing instance network info cache due to event network-changed-f90355a4-7ff9-447e-8dad-d7b710e9c578. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1175.700324] env[68674]: DEBUG oslo_concurrency.lockutils [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] Acquiring lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.700406] env[68674]: DEBUG oslo_concurrency.lockutils [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] Acquired lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.700639] env[68674]: DEBUG nova.network.neutron [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Refreshing network info cache for port f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1175.755347] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1175.755347] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52496bb8-20f6-1ba0-67bc-c46871ab7736" [ 1175.755347] env[68674]: _type = "HttpNfcLease" [ 1175.755347] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1175.755623] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1175.755623] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52496bb8-20f6-1ba0-67bc-c46871ab7736" [ 1175.755623] env[68674]: _type = "HttpNfcLease" [ 1175.755623] env[68674]: }. 
{{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1175.756395] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc54fe66-b0f7-4656-8f63-f05c76224eb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.764787] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5270e05b-8731-53db-3b29-82d88900ee5d/disk-0.vmdk from lease info. {{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1175.764787] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5270e05b-8731-53db-3b29-82d88900ee5d/disk-0.vmdk. {{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1175.832778] env[68674]: INFO nova.compute.manager [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Shelve offloading [ 1175.848727] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-797cab0b-9184-4fb9-b5fd-b704a710bdc7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.854282] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240983, 'name': ReconfigVM_Task, 'duration_secs': 0.156003} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.854961] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1175.860761] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19a2618a-5edb-4313-b6b2-566466608ede {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.875992] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1175.875992] env[68674]: value = "task-3240984" [ 1175.875992] env[68674]: _type = "Task" [ 1175.875992] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.885456] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240984, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.041050] env[68674]: DEBUG nova.scheduler.client.report [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.245568] env[68674]: DEBUG nova.network.neutron [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1176.252135] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.252425] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d8cf850-c635-4351-9402-3873f2e72ffe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.259535] env[68674]: DEBUG oslo_vmware.api [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1176.259535] env[68674]: value = "task-3240985" [ 1176.259535] env[68674]: _type = "Task" [ 1176.259535] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.267274] env[68674]: DEBUG oslo_vmware.api [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.336209] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.336559] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-154acff7-16ad-469e-9b48-c3b51a090895 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.340139] env[68674]: DEBUG nova.network.neutron [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.347134] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1176.347134] env[68674]: value = "task-3240986" [ 1176.347134] env[68674]: _type = "Task" [ 1176.347134] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.360085] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] VM already powered off {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1176.360314] env[68674]: DEBUG nova.compute.manager [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1176.361182] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83700b40-de46-4262-9167-27be591c45f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.369032] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.369158] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.369927] env[68674]: DEBUG nova.network.neutron [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Building network 
info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1176.392343] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240984, 'name': ReconfigVM_Task, 'duration_secs': 0.15172} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.394080] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647686', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'name': 'volume-ac75a73a-75e6-493d-9caf-c35a3091e391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c876b288-de2a-4195-bfef-88f38e219d9a', 'attached_at': '2025-04-03T08:16:36.000000', 'detached_at': '', 'volume_id': 'ac75a73a-75e6-493d-9caf-c35a3091e391', 'serial': 'ac75a73a-75e6-493d-9caf-c35a3091e391'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1176.394488] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.395571] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5b273c-aa7f-414e-ac56-69d280eaf20e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.407655] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.409099] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5d7e13e-ec8f-4e27-90be-a4ec6ebc8828 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.484489] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.484689] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.484847] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 
tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore file [datastore1] c876b288-de2a-4195-bfef-88f38e219d9a {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.485230] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69e93768-8eba-4689-8238-37ff2dec3cc0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.493024] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1176.493024] env[68674]: value = "task-3240988" [ 1176.493024] env[68674]: _type = "Task" [ 1176.493024] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.506056] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.547045] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.307s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.547045] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1176.554369] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.033s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.769574] env[68674]: DEBUG oslo_vmware.api [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240985, 'name': PowerOffVM_Task, 'duration_secs': 0.234958} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.771358] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.771611] env[68674]: DEBUG nova.compute.manager [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1176.772426] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e835f4b9-80f7-4c57-a025-24537017ff6f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.843508] env[68674]: DEBUG oslo_concurrency.lockutils [req-2f1f6d47-4184-422f-8ea7-ec7cce129ae0 req-48094a16-c69d-41ee-861c-325c7ef87858 service nova] Releasing lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.005824] env[68674]: DEBUG oslo_vmware.api [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3240988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097324} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.010042] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.010321] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.010465] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.010640] env[68674]: INFO nova.compute.manager [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Took 2.33 seconds to destroy the instance on the hypervisor. [ 1177.010884] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.011125] env[68674]: DEBUG nova.compute.manager [-] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1177.011237] env[68674]: DEBUG nova.network.neutron [-] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.027433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.027795] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.065786] env[68674]: DEBUG nova.compute.utils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1177.071537] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1177.071725] env[68674]: DEBUG nova.network.neutron [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1177.184754] env[68674]: DEBUG nova.policy [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b3a4c2c5bae41998d58a116e648883d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa34d6d90c6d46aaa2cb77259b5e0c27', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1177.215411] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Completed reading data from the image iterator. 
{{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1177.215616] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5270e05b-8731-53db-3b29-82d88900ee5d/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1177.216893] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a4646b-3c7e-4fa5-bde5-4cd09d6b637f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.224465] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5270e05b-8731-53db-3b29-82d88900ee5d/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1177.224647] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5270e05b-8731-53db-3b29-82d88900ee5d/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1177.224927] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ff6b0737-a565-4c66-8b76-549487fb25d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.284400] env[68674]: DEBUG oslo_concurrency.lockutils [None req-44384c45-0af8-44d6-ae19-17bd41442593 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.304878] env[68674]: DEBUG nova.network.neutron [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.472183] env[68674]: DEBUG oslo_vmware.rw_handles [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5270e05b-8731-53db-3b29-82d88900ee5d/disk-0.vmdk. {{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1177.472458] env[68674]: INFO nova.virt.vmwareapi.images [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Downloaded image file data 4567b890-657d-4354-bb3d-083e8acf9038 [ 1177.473344] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91489130-eb74-4d01-9165-18d91578fce6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.492213] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79dbd562-f42b-405d-8c4d-da8265d7169b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.531832] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1177.534661] env[68674]: INFO nova.virt.vmwareapi.images [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] The imported VM was unregistered [ 1177.537158] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1177.537403] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Creating directory with path [datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1177.537912] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b3914e8-3263-4c6a-8adf-569618be8e59 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.548244] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Created directory with path [datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038 {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1177.548516] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d/OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d.vmdk to [datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk. {{(pid=68674) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1177.549283] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Successfully updated port: 2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1177.550372] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e0b0c974-2be9-470a-8b5b-4689a8639085 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.558019] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1177.558019] env[68674]: value = "task-3240990" [ 1177.558019] env[68674]: _type = "Task" [ 1177.558019] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.567013] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240990, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.574306] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1177.589507] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 23891bad-1b63-4237-9243-78954cf67d52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.589691] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.589819] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance dbbf1313-6e44-45e2-8bf6-83409f06cb4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.589951] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 8f183286-f908-4d05-9a61-d6b1bf10dfb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590077] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance c876b288-de2a-4195-bfef-88f38e219d9a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590202] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590315] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance ba4bfbb4-a89b-4ab6-964e-792647fd5a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590488] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 5fa43d94-64af-4cd2-9976-ca9cd994447e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590647] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 9b8aad00-0980-4752-954a-c09c9ae6f9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590764] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 0cbfda3e-337f-41f6-add2-1dcd725b0953 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.590890] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.811044] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.836461] env[68674]: DEBUG nova.compute.manager [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received event network-vif-plugged-2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.836802] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] Acquiring lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.836983] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.837207] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.837409] env[68674]: DEBUG nova.compute.manager [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] No waiting events found dispatching network-vif-plugged-2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1177.837611] env[68674]: WARNING nova.compute.manager [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received unexpected event network-vif-plugged-2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 for instance with vm_state building and task_state spawning. [ 1177.837806] env[68674]: DEBUG nova.compute.manager [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received event network-changed-2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.837973] env[68674]: DEBUG nova.compute.manager [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Refreshing instance network info cache due to event network-changed-2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.838242] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] Acquiring lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.838403] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] Acquired lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.838570] env[68674]: DEBUG nova.network.neutron [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Refreshing network info cache for port 2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.881847] env[68674]: DEBUG nova.network.neutron [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Successfully created port: 43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1178.030211] env[68674]: DEBUG nova.compute.manager [req-8c102bad-bc4c-499f-953f-348cdf2c3092 req-e5fba0aa-e079-4a60-8b4a-0087e0e73aa8 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Received event network-vif-deleted-15b18361-a1c9-4dab-bcaf-6a40837d6bbe {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1178.030369] env[68674]: INFO nova.compute.manager [req-8c102bad-bc4c-499f-953f-348cdf2c3092 req-e5fba0aa-e079-4a60-8b4a-0087e0e73aa8 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Neutron deleted interface 15b18361-a1c9-4dab-bcaf-6a40837d6bbe; detaching it from the instance and deleting it from the info cache [ 1178.030583] env[68674]: DEBUG nova.network.neutron [req-8c102bad-bc4c-499f-953f-348cdf2c3092 req-e5fba0aa-e079-4a60-8b4a-0087e0e73aa8 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.053939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.065649] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.077674] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240990, 'name': 
MoveVirtualDisk_Task} progress is 24%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.093781] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f4751bd8-e0df-4686-a22f-e51a4a98b8d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.094193] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1178.094193] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1178.217754] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1178.217859] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65241efb-7842-4a36-88cd-5567ff9af8a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.232324] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1178.232324] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-677c307c-3bdd-4aa6-a141-b3be4b64dd7e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.305262] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1178.305571] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1178.305864] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleting the datastore file [datastore1] 
8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.306210] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad48f54b-c475-4372-b16e-7fc622112f3d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.314948] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1178.314948] env[68674]: value = "task-3240992" [ 1178.314948] env[68674]: _type = "Task" [ 1178.314948] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.321118] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf50f69-c4eb-49a6-81a4-15559a5ef16f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.331131] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624dcccc-37a8-4629-96a4-0c93fc6fcabc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.334745] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.365949] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9795cae2-e7d2-481a-b637-c2c889e19452 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.375336] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e63117c-b2d9-4cb0-a9bf-b39745a8162b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.393446] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.401606] env[68674]: DEBUG nova.network.neutron [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1178.483615] env[68674]: DEBUG nova.network.neutron [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.508370] env[68674]: DEBUG nova.network.neutron [-] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.537194] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8470c002-5783-48a7-b126-0581a0c414ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.551749] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be43916d-d340-4b97-b3bf-5a24a169717d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.572661] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.572940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.573173] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "5fa43d94-64af-4cd2-9976-ca9cd994447e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.573361] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.573528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.575296] env[68674]: DEBUG 
oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240990, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.575820] env[68674]: INFO nova.compute.manager [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Terminating instance [ 1178.590224] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1178.593243] env[68674]: DEBUG nova.compute.manager [req-8c102bad-bc4c-499f-953f-348cdf2c3092 req-e5fba0aa-e079-4a60-8b4a-0087e0e73aa8 service nova] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Detach interface failed, port_id=15b18361-a1c9-4dab-bcaf-6a40837d6bbe, reason: Instance c876b288-de2a-4195-bfef-88f38e219d9a could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1178.629386] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.629677] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.629843] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.630057] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.630213] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 
tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.630362] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.630593] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.630761] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.630934] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.631128] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.631307] env[68674]: DEBUG nova.virt.hardware [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.632636] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be57d656-b627-4359-b602-7275ee61e4ff {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.645263] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4dc0d4-f512-4345-aa83-9c413e6856c3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.826847] env[68674]: DEBUG oslo_vmware.api [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3240992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35084} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.827240] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.827435] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1178.827662] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1178.852099] env[68674]: INFO nova.scheduler.client.report [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted allocations for instance 8f183286-f908-4d05-9a61-d6b1bf10dfb9 [ 1178.896889] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1178.987132] env[68674]: DEBUG oslo_concurrency.lockutils [req-1ce6fe41-04ca-4d62-bc50-84c011ec1871 req-e4bb5429-ddab-41e3-9f31-c52daf0cfd80 service nova] Releasing lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.987960] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.988136] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.010790] env[68674]: INFO nova.compute.manager [-] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Took 2.00 seconds to deallocate network for instance. 
[ 1179.076705] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240990, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.080546] env[68674]: DEBUG nova.compute.manager [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1179.080840] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1179.081735] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195a7a0a-cbe0-4665-b59a-2f750055166a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.091130] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1179.091401] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60568f20-f77e-4222-8d4a-dc3c1670bd53 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.208332] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1179.208584] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1179.208777] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleting the datastore file [datastore2] 5fa43d94-64af-4cd2-9976-ca9cd994447e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1179.209123] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f24a41c6-12c8-453f-867c-be23bf08a31f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.220117] env[68674]: DEBUG oslo_vmware.api [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] 
Waiting for the task: (returnval){ [ 1179.220117] env[68674]: value = "task-3240994" [ 1179.220117] env[68674]: _type = "Task" [ 1179.220117] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.231139] env[68674]: DEBUG oslo_vmware.api [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240994, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.357506] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.405030] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1179.405030] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.848s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.405030] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.338s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.405030] env[68674]: INFO nova.compute.claims [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1179.527901] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1179.559821] env[68674]: INFO nova.compute.manager [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1179.562063] env[68674]: DEBUG nova.compute.manager [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Deleting volume: ac75a73a-75e6-493d-9caf-c35a3091e391 {{(pid=68674) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1179.575552] env[68674]: DEBUG nova.network.neutron [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Successfully updated port: 43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1179.583655] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240990, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.734266] env[68674]: DEBUG oslo_vmware.api [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240994, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.853071] env[68674]: DEBUG nova.network.neutron [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Updating instance_info_cache with network_info: [{"id": "f90355a4-7ff9-447e-8dad-d7b710e9c578", "address": "fa:16:3e:40:b1:eb", "network": {"id": "2a102a33-dcc9-4e88-bc9b-0ddd2c11fe2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1952956829", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf90355a4-7f", "ovs_interfaceid": "f90355a4-7ff9-447e-8dad-d7b710e9c578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019", "address": "fa:16:3e:d5:a7:ae", "network": {"id": "28ec6ea8-fe93-40c3-a7cc-64c280d1e93f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-247624441", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": 
"8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d97bf60-6b", "ovs_interfaceid": "2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.869639] env[68674]: DEBUG nova.compute.manager [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-vif-unplugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.869890] env[68674]: DEBUG oslo_concurrency.lockutils [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.870067] env[68674]: DEBUG oslo_concurrency.lockutils [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.870240] env[68674]: DEBUG oslo_concurrency.lockutils [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.870409] env[68674]: DEBUG nova.compute.manager [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] No waiting events found dispatching network-vif-unplugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1179.870580] env[68674]: WARNING nova.compute.manager [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received unexpected event network-vif-unplugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b for instance with vm_state shelved_offloaded and task_state None. 
[ 1179.870743] env[68674]: DEBUG nova.compute.manager [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.870901] env[68674]: DEBUG nova.compute.manager [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing instance network info cache due to event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1179.871098] env[68674]: DEBUG oslo_concurrency.lockutils [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.871233] env[68674]: DEBUG oslo_concurrency.lockutils [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.871387] env[68674]: DEBUG nova.network.neutron [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.054300] env[68674]: DEBUG nova.compute.manager [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Received event network-vif-plugged-43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1180.054536] env[68674]: DEBUG oslo_concurrency.lockutils [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.054790] env[68674]: DEBUG oslo_concurrency.lockutils [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.056031] env[68674]: DEBUG oslo_concurrency.lockutils [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.056031] env[68674]: DEBUG nova.compute.manager [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] No waiting 
events found dispatching network-vif-plugged-43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1180.056031] env[68674]: WARNING nova.compute.manager [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Received unexpected event network-vif-plugged-43bd7986-ab0b-4dd8-a224-a42cd649e0d0 for instance with vm_state building and task_state spawning. [ 1180.056031] env[68674]: DEBUG nova.compute.manager [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Received event network-changed-43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1180.056031] env[68674]: DEBUG nova.compute.manager [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Refreshing instance network info cache due to event network-changed-43bd7986-ab0b-4dd8-a224-a42cd649e0d0. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1180.056031] env[68674]: DEBUG oslo_concurrency.lockutils [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] Acquiring lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.056310] env[68674]: DEBUG oslo_concurrency.lockutils [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] Acquired lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.056310] env[68674]: DEBUG nova.network.neutron [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Refreshing network info cache for port 43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.074718] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240990, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.356585} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.074979] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d/OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d.vmdk to [datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk. 
[ 1180.075206] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Cleaning up location [datastore2] OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1180.075379] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_42bc610c-ad84-4534-8bca-2fe5f726d59d {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1180.075609] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-320f1cd2-75e5-48dd-af90-cc30d1d10bb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.081977] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1180.081977] env[68674]: value = "task-3240996" [ 1180.081977] env[68674]: _type = "Task" [ 1180.081977] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.085175] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.090117] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.146469] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.232476] env[68674]: DEBUG oslo_vmware.api [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3240994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.734956} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.232744] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.232930] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1180.233126] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1180.233307] env[68674]: INFO nova.compute.manager [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1180.233545] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1180.234044] env[68674]: DEBUG nova.compute.manager [-] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1180.234044] env[68674]: DEBUG nova.network.neutron [-] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1180.355573] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Releasing lock "refresh_cache-0cbfda3e-337f-41f6-add2-1dcd725b0953" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.356041] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance network_info: |[{"id": "f90355a4-7ff9-447e-8dad-d7b710e9c578", "address": "fa:16:3e:40:b1:eb", "network": {"id": "2a102a33-dcc9-4e88-bc9b-0ddd2c11fe2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1952956829", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf90355a4-7f", "ovs_interfaceid": "f90355a4-7ff9-447e-8dad-d7b710e9c578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019", "address": "fa:16:3e:d5:a7:ae", "network": {"id": "28ec6ea8-fe93-40c3-a7cc-64c280d1e93f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-247624441", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e73e759715a4e39a03bd234d918b2fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d97bf60-6b", "ovs_interfaceid": "2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1180.356499] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:b1:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f90355a4-7ff9-447e-8dad-d7b710e9c578', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:a7:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.367525] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1180.367802] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1180.367976] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe0de049-5bd5-4ec9-8dd9-4a6b9c8aeb80 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.395987] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.395987] env[68674]: value = "task-3240997" [ 1180.395987] env[68674]: _type = "Task" [ 1180.395987] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.406919] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240997, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.564159] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f733ca07-4571-4742-af7d-a5699e333998 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.571900] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7055a9bb-f48c-4b93-930e-fc7b54e87182 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.603224] env[68674]: DEBUG nova.network.neutron [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1180.610208] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fabc146-c68f-4c3f-b775-891b31c619a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.620999] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd55970-7d96-4090-b411-f0d48d9a2f63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.624698] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03308} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.624955] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.625151] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.625430] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk to [datastore2] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.626041] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06e0496-cdcf-4a5b-8ec8-350bf87797c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.635486] env[68674]: DEBUG nova.compute.provider_tree [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.641176] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1180.641176] env[68674]: value = "task-3240998" [ 1180.641176] env[68674]: _type = "Task" [ 1180.641176] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.645146] env[68674]: DEBUG nova.network.neutron [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updated VIF entry in instance network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.645591] env[68674]: DEBUG nova.network.neutron [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap0f444395-3a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.650463] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.685910] env[68674]: DEBUG nova.network.neutron [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.907153] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240997, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.002923] env[68674]: DEBUG nova.network.neutron [-] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.139994] env[68674]: DEBUG nova.scheduler.client.report [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.153090] env[68674]: DEBUG oslo_concurrency.lockutils [req-d44e2786-924b-4e71-aac1-43f338be56cc req-bce7e2be-0550-450e-93b2-c2462a9c6142 service nova] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.153497] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240998, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.189066] env[68674]: DEBUG oslo_concurrency.lockutils [req-a78b71d3-57e2-416d-84d6-4de7c1d387e4 req-b1e4ee35-be13-4a56-9eb6-ab374fb06d7c service nova] Releasing lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.189926] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.190113] env[68674]: DEBUG nova.network.neutron [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.342742] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.407146] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240997, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.505345] env[68674]: INFO nova.compute.manager [-] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Took 1.27 seconds to deallocate network for instance. [ 1181.648985] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.649623] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1181.653048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.296s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.653384] env[68674]: DEBUG nova.objects.instance [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'resources' on Instance uuid 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.660773] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240998, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.723931] env[68674]: DEBUG nova.network.neutron [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1181.881905] env[68674]: DEBUG nova.network.neutron [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updating instance_info_cache with network_info: [{"id": "43bd7986-ab0b-4dd8-a224-a42cd649e0d0", "address": "fa:16:3e:84:76:d0", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43bd7986-ab", "ovs_interfaceid": "43bd7986-ab0b-4dd8-a224-a42cd649e0d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.898403] env[68674]: DEBUG nova.compute.manager [req-3bd31820-364d-4886-82b5-d0d25cfb3d78 req-2a528ca2-8900-4abc-9331-954f0194d1f9 service nova] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Received event network-vif-deleted-c7865d2c-4c30-4dd4-b925-759844ba4a80 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.910504] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240997, 'name': CreateVM_Task, 'duration_secs': 1.402338} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.912265] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1181.912265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.912265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.912265] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1181.912636] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b59a8e05-a6fa-43e6-b8d1-eae198cce125 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.919302] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1181.919302] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f81f7b-2157-cd38-0a07-f071a6575fe2" [ 1181.919302] env[68674]: _type = "Task" [ 1181.919302] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.928337] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f81f7b-2157-cd38-0a07-f071a6575fe2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.013466] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.154381] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240998, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.156052] env[68674]: DEBUG nova.objects.instance [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'numa_topology' on Instance uuid 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.159031] env[68674]: DEBUG nova.compute.utils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.159834] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1182.159834] env[68674]: DEBUG nova.network.neutron [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1182.198020] env[68674]: DEBUG nova.policy [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4306c590bce41dfb83eb474079deee4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bc5a5f88cdd441fbb0df17cab2fcecc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1182.384691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.385053] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Instance network_info: |[{"id": "43bd7986-ab0b-4dd8-a224-a42cd649e0d0", "address": "fa:16:3e:84:76:d0", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43bd7986-ab", "ovs_interfaceid": "43bd7986-ab0b-4dd8-a224-a42cd649e0d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1182.385530] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:76:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b9aabc7c-0f6c-42eb-bd27-493a1496c0c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43bd7986-ab0b-4dd8-a224-a42cd649e0d0', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.393463] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1182.393711] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1182.394028] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b744e575-2c3f-4bfb-bf0e-1c926e1789d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.415066] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.415066] env[68674]: value = "task-3240999" [ 1182.415066] env[68674]: _type = "Task" [ 1182.415066] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.426182] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240999, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.432332] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f81f7b-2157-cd38-0a07-f071a6575fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.089201} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.432756] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.432996] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1182.433250] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.433405] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.433583] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1182.433888] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6807f483-ef2f-4637-8fb8-52f7685fa84d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.450829] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1182.451049] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1182.451868] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd57972-c68e-4352-9857-4b7e390fe8c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.458406] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1182.458406] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5278e329-aa5f-7ad2-1355-22a488e688d5" [ 1182.458406] env[68674]: _type = "Task" [ 1182.458406] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.466614] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5278e329-aa5f-7ad2-1355-22a488e688d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.533485] env[68674]: DEBUG nova.network.neutron [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Successfully created port: 393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.655921] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240998, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.663022] env[68674]: DEBUG nova.objects.base [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Object Instance<8f183286-f908-4d05-9a61-d6b1bf10dfb9> lazy-loaded attributes: resources,numa_topology {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1182.663784] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Start building block device mappings for instance. 
{{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1182.849029] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8061133-203f-4a8d-b48c-691207428bbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.856722] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0b3ba5-0f5e-41d5-aac8-b36591ae50d6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.888829] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59598d77-9aef-4cd5-a1c4-a15aeb96f35f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.898116] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89689566-f51b-439b-b026-942158eb594b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.914398] env[68674]: DEBUG nova.compute.provider_tree [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.926452] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240999, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.968019] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5278e329-aa5f-7ad2-1355-22a488e688d5, 'name': SearchDatastore_Task, 'duration_secs': 0.080942} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.968804] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b34dbb81-1a87-424a-bf17-e94221df770f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.974159] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1182.974159] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8326-e8a7-5760-eea6-1f0de9b15e3e" [ 1182.974159] env[68674]: _type = "Task" [ 1182.974159] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.981334] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8326-e8a7-5760-eea6-1f0de9b15e3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.156117] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3240998, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.314039} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.156390] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4567b890-657d-4354-bb3d-083e8acf9038/4567b890-657d-4354-bb3d-083e8acf9038.vmdk to [datastore2] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.157192] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dae2c17-d191-4de5-818a-75669aef0597 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.181910] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1183.182389] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4726d599-89a9-4529-9bbf-538fb086504b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.202430] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1183.202430] env[68674]: value = "task-3241000" [ 1183.202430] env[68674]: _type = "Task" [ 1183.202430] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.211120] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241000, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.417689] env[68674]: DEBUG nova.scheduler.client.report [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.430389] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3240999, 'name': CreateVM_Task, 'duration_secs': 0.631307} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.430590] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1183.431287] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.431457] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.431779] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1183.432074] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12c515c5-820a-44bd-86eb-ae3de6a6f916 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.437374] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1183.437374] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5213c4d8-9dca-191e-2d6a-3ddad5739123" [ 1183.437374] env[68674]: _type = "Task" [ 1183.437374] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.446378] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5213c4d8-9dca-191e-2d6a-3ddad5739123, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.485679] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]524e8326-e8a7-5760-eea6-1f0de9b15e3e, 'name': SearchDatastore_Task, 'duration_secs': 0.008946} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.485679] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.485679] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 0cbfda3e-337f-41f6-add2-1dcd725b0953/0cbfda3e-337f-41f6-add2-1dcd725b0953.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1183.486022] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c625c81-5026-42dc-8270-bedb81a60a97 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.493665] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1183.493665] env[68674]: value = "task-3241001" [ 1183.493665] env[68674]: _type = "Task" [ 1183.493665] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.502257] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.684376] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.713210] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.713480] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.713641] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.713826] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.714029] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.714197] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.714422] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.714640] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.714743] env[68674]: DEBUG nova.virt.hardware [None 
req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.714932] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.715157] env[68674]: DEBUG nova.virt.hardware [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.716109] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f47965-c0c9-4d56-9935-579c8d755284 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.725028] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.728426] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3167cf67-4dd8-4577-8949-7b4e62690a02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.926019] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.273s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.928984] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.782s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.928984] env[68674]: DEBUG nova.objects.instance [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'resources' on Instance uuid c876b288-de2a-4195-bfef-88f38e219d9a {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.948715] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5213c4d8-9dca-191e-2d6a-3ddad5739123, 'name': SearchDatastore_Task, 'duration_secs': 0.059737} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.949047] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.949302] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1183.949549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.949688] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.949867] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.950156] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da7f61b8-987e-43b9-9be5-d12fa828d779 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.965543] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.965897] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1183.967026] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17ffc59e-18db-4f1b-ae12-34769a887dbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.973370] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1183.973370] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523abe5f-4f94-a8d2-ae78-1f6942c4912a" [ 1183.973370] env[68674]: _type = "Task" [ 1183.973370] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.979382] env[68674]: DEBUG nova.compute.manager [req-c3332e81-18ac-4dfd-bee2-f762020790ef req-2d47764d-e49f-4539-adad-d770a958c440 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Received event network-vif-plugged-393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.979605] env[68674]: DEBUG oslo_concurrency.lockutils [req-c3332e81-18ac-4dfd-bee2-f762020790ef req-2d47764d-e49f-4539-adad-d770a958c440 service nova] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.979812] env[68674]: DEBUG oslo_concurrency.lockutils [req-c3332e81-18ac-4dfd-bee2-f762020790ef req-2d47764d-e49f-4539-adad-d770a958c440 service nova] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.979979] env[68674]: DEBUG oslo_concurrency.lockutils [req-c3332e81-18ac-4dfd-bee2-f762020790ef req-2d47764d-e49f-4539-adad-d770a958c440 service nova] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.980163] env[68674]: DEBUG nova.compute.manager [req-c3332e81-18ac-4dfd-bee2-f762020790ef req-2d47764d-e49f-4539-adad-d770a958c440 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] No waiting events found dispatching network-vif-plugged-393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1183.980332] env[68674]: WARNING nova.compute.manager [req-c3332e81-18ac-4dfd-bee2-f762020790ef req-2d47764d-e49f-4539-adad-d770a958c440 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Received unexpected event network-vif-plugged-393444bd-993f-4249-b58b-e01020f12db7 for instance with vm_state building and task_state spawning. [ 1183.986040] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523abe5f-4f94-a8d2-ae78-1f6942c4912a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.003636] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.115599] env[68674]: DEBUG nova.network.neutron [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Successfully updated port: 393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.214811] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241000, 'name': ReconfigVM_Task, 'duration_secs': 0.713977} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.215156] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 9b8aad00-0980-4752-954a-c09c9ae6f9ec/9b8aad00-0980-4752-954a-c09c9ae6f9ec.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1184.215882] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca774bfb-3d64-4694-8d8f-cb443a368a18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.222922] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1184.222922] env[68674]: value = "task-3241002" [ 1184.222922] env[68674]: _type = "Task" [ 1184.222922] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.231505] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241002, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.441235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fe7c7731-eacd-416c-8b56-2ce242b7fcf0 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.960s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.442536] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.100s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.442750] env[68674]: INFO nova.compute.manager [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Unshelving [ 1184.484654] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523abe5f-4f94-a8d2-ae78-1f6942c4912a, 'name': SearchDatastore_Task, 'duration_secs': 0.037214} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.490026] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1853acf-b7c5-46f2-b716-bffd4f5de4e3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.500056] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1184.500056] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5235ac25-23c2-81d0-531c-0e392a33a550" [ 1184.500056] env[68674]: _type = "Task" [ 1184.500056] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.513138] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241001, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.520216] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5235ac25-23c2-81d0-531c-0e392a33a550, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.618877] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.619048] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.619175] env[68674]: DEBUG nova.network.neutron [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.629065] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6066f05-f3ca-4e8e-9f23-4f0c53d212a0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.641924] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80de407-1f31-4c68-bc35-57f1aad95a26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.674801] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdaef58-a0e8-4b51-828c-e64c87e263d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.683871] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a715ce-87ba-47c3-a08b-e99d050f1789 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.698500] env[68674]: DEBUG nova.compute.provider_tree [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.732267] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241002, 'name': Rename_Task, 'duration_secs': 0.152968} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.732592] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1184.732845] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7616334d-12f8-4bde-893a-9ef8d68cfd0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.739495] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1184.739495] env[68674]: value = "task-3241003" [ 1184.739495] env[68674]: _type = "Task" [ 1184.739495] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.747770] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241003, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.005279] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241001, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.015522] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5235ac25-23c2-81d0-531c-0e392a33a550, 'name': SearchDatastore_Task, 'duration_secs': 0.058586} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.016167] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.016432] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df/1b276f5a-9e53-4ef9-892b-4e4bd0dc09df.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1185.016691] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-596d90d7-b012-41b0-a73d-9a0e0aeb230b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.023438] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1185.023438] env[68674]: value = "task-3241004" [ 1185.023438] env[68674]: _type = "Task" [ 1185.023438] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.030681] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.156325] env[68674]: DEBUG nova.network.neutron [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.202201] env[68674]: DEBUG nova.scheduler.client.report [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.249285] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241003, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.332450] env[68674]: DEBUG nova.network.neutron [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating instance_info_cache with network_info: [{"id": "393444bd-993f-4249-b58b-e01020f12db7", "address": "fa:16:3e:b8:db:7c", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap393444bd-99", "ovs_interfaceid": "393444bd-993f-4249-b58b-e01020f12db7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.470845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.505323] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241001, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.532465] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.537284] env[68674]: INFO nova.compute.manager [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Rebuilding instance [ 1185.572925] env[68674]: DEBUG nova.compute.manager [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.573830] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a195bcdb-c45e-411b-a5c8-cc73a8d4fd02 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.708100] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.711070] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.697s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.711070] env[68674]: DEBUG nova.objects.instance [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'resources' on Instance uuid 5fa43d94-64af-4cd2-9976-ca9cd994447e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.727499] env[68674]: INFO nova.scheduler.client.report [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted allocations for instance c876b288-de2a-4195-bfef-88f38e219d9a [ 1185.748851] env[68674]: DEBUG oslo_vmware.api [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241003, 'name': PowerOnVM_Task, 'duration_secs': 0.565874} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.749206] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1185.835346] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.836128] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Instance network_info: |[{"id": "393444bd-993f-4249-b58b-e01020f12db7", "address": "fa:16:3e:b8:db:7c", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap393444bd-99", "ovs_interfaceid": "393444bd-993f-4249-b58b-e01020f12db7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1185.836575] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:db:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '393444bd-993f-4249-b58b-e01020f12db7', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.851399] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.852033] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.852905] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ad5bfcb-dcda-4402-9977-01be3281d86c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.869281] env[68674]: DEBUG nova.compute.manager [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.870230] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f7c77b-84c1-4354-96f3-053f5dbcda57 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.881240] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.881240] env[68674]: value = "task-3241005" [ 1185.881240] env[68674]: _type = "Task" [ 1185.881240] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.889504] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241005, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.007989] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241001, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.228523} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.009408] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 0cbfda3e-337f-41f6-add2-1dcd725b0953/0cbfda3e-337f-41f6-add2-1dcd725b0953.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.009639] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.010988] env[68674]: DEBUG nova.compute.manager [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Received event network-changed-393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.011182] env[68674]: DEBUG nova.compute.manager [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Refreshing instance network info cache due to event network-changed-393444bd-993f-4249-b58b-e01020f12db7. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1186.011392] env[68674]: DEBUG oslo_concurrency.lockutils [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] Acquiring lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.011537] env[68674]: DEBUG oslo_concurrency.lockutils [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] Acquired lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.011697] env[68674]: DEBUG nova.network.neutron [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Refreshing network info cache for port 393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1186.013321] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0581f224-7a0e-4aae-8d38-557c6dfd4126 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.021876] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1186.021876] env[68674]: value = "task-3241006" [ 1186.021876] env[68674]: _type = "Task" [ 1186.021876] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.036504] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241006, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.040110] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241004, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.235059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0bae5d11-0176-494d-b3fc-c9fdc5e1ec71 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "c876b288-de2a-4195-bfef-88f38e219d9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.073s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.369136] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d17be6-7707-48ac-9676-7a9925842973 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.377581] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d64c5f-1e90-434e-8855-db1c76560fbc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.388873] env[68674]: DEBUG oslo_concurrency.lockutils [None req-4d218104-c4aa-4691-a108-9526a319a493 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.715s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.418594] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ab28e5-722b-4e46-ba95-85f64ae31e87 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.424160] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241005, 'name': CreateVM_Task, 'duration_secs': 0.381577} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.424644] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1186.425345] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.425514] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.425822] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1186.426091] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87fbbf7b-27e1-4c3b-90a7-e3d239ccfe5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.431201] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ae3864-b7cf-4028-a67d-dd871f53b688 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.436180] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1186.436180] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5294938a-84dc-a127-7480-e9ea1551a772" [ 1186.436180] env[68674]: _type = "Task" [ 1186.436180] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.447464] env[68674]: DEBUG nova.compute.provider_tree [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.453365] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5294938a-84dc-a127-7480-e9ea1551a772, 'name': SearchDatastore_Task, 'duration_secs': 0.010333} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.453629] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.453875] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.454128] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.454278] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.454454] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.454685] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28acf51f-1af7-40af-8708-1205f199ac1d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.462566] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.462799] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.463669] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bd9a483-7f67-4e96-a0d2-a27af9057148 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.468661] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1186.468661] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5238f807-cd63-2c1d-eeb0-ccae6d6d2b7f" [ 1186.468661] env[68674]: _type = "Task" [ 1186.468661] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.476095] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5238f807-cd63-2c1d-eeb0-ccae6d6d2b7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.532793] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241006, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141105} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.533474] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1186.534250] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824c57a6-697e-4391-b036-ce3c3496b65f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.539375] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241004, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.123701} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.539937] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df/1b276f5a-9e53-4ef9-892b-4e4bd0dc09df.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.540169] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.540400] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d304bac-f799-4a1a-8109-74def855d7f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.562946] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 0cbfda3e-337f-41f6-add2-1dcd725b0953/0cbfda3e-337f-41f6-add2-1dcd725b0953.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.565723] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49025a0b-dad5-4ee8-b42e-c898e60db7e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.583589] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1186.583589] env[68674]: value = "task-3241007" [ 1186.583589] env[68674]: _type = "Task" [ 1186.583589] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.587193] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.588414] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69e71b15-3bd9-4f67-a914-130f8d221a95 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.590868] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1186.590868] env[68674]: value = "task-3241008" [ 1186.590868] env[68674]: _type = "Task" [ 1186.590868] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.596291] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241007, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.601494] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241008, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.602745] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1186.602745] env[68674]: value = "task-3241009" [ 1186.602745] env[68674]: _type = "Task" [ 1186.602745] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.612082] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241009, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.746493] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.746754] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.747065] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.747303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.747480] env[68674]: DEBUG oslo_concurrency.lockutils 
[None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.749744] env[68674]: INFO nova.compute.manager [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Terminating instance [ 1186.783695] env[68674]: DEBUG nova.network.neutron [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updated VIF entry in instance network info cache for port 393444bd-993f-4249-b58b-e01020f12db7. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.784087] env[68674]: DEBUG nova.network.neutron [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating instance_info_cache with network_info: [{"id": "393444bd-993f-4249-b58b-e01020f12db7", "address": "fa:16:3e:b8:db:7c", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap393444bd-99", "ovs_interfaceid": "393444bd-993f-4249-b58b-e01020f12db7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.952644] env[68674]: DEBUG nova.scheduler.client.report [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1186.980032] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': 
session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5238f807-cd63-2c1d-eeb0-ccae6d6d2b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.010664} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.980642] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a162fa3f-09df-4792-987c-ad80ff102788 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.985615] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1186.985615] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a72b38-8743-78c5-470e-840e64932cf4" [ 1186.985615] env[68674]: _type = "Task" [ 1186.985615] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.993374] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a72b38-8743-78c5-470e-840e64932cf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.093169] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083714} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.096139] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1187.099028] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bc9a89-1b56-4bda-b3e5-873615f593c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.104249] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.124078] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df/1b276f5a-9e53-4ef9-892b-4e4bd0dc09df.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1187.124342] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e430273c-3fe8-43f6-bf7d-fc871c65ce0a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.143187] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241009, 'name': PowerOffVM_Task, 'duration_secs': 0.478785} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.144299] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.144541] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1187.144855] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1187.144855] env[68674]: value = "task-3241010" [ 1187.144855] env[68674]: _type = "Task" [ 1187.144855] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.145541] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808b9855-d151-4601-80ac-7f4d85093a9a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.154785] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.157694] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00d79644-16f3-448e-81a9-838046fde99f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.159029] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241010, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.228184] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.228727] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.228727] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleting the datastore file [datastore2] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.228932] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03a8b465-0a58-439a-9d3e-924e2edd0a7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.235854] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1187.235854] env[68674]: value = "task-3241012" [ 1187.235854] env[68674]: _type = "Task" [ 1187.235854] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.244838] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241012, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.254737] env[68674]: DEBUG nova.compute.manager [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1187.254737] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1187.255263] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3885ad92-5fda-4795-aeb1-23f23a564c2f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.262985] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1187.263996] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97053d8f-d0ba-4b98-8603-6091b5bbe94f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.270704] env[68674]: DEBUG oslo_vmware.api [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1187.270704] env[68674]: value = "task-3241013" [ 1187.270704] env[68674]: _type = "Task" [ 1187.270704] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.278962] env[68674]: DEBUG oslo_vmware.api [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241013, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.286666] env[68674]: DEBUG oslo_concurrency.lockutils [req-6444affb-736c-4903-9d31-3521e555f704 req-e605fcf9-50f3-4e7b-9052-83c701b7de16 service nova] Releasing lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.459610] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.462127] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.991s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.462400] env[68674]: DEBUG nova.objects.instance [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'pci_requests' on Instance uuid 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.477816] env[68674]: INFO nova.scheduler.client.report [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocations for instance 5fa43d94-64af-4cd2-9976-ca9cd994447e [ 1187.495396] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a72b38-8743-78c5-470e-840e64932cf4, 'name': SearchDatastore_Task, 'duration_secs': 0.010552} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.495699] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.496014] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f4751bd8-e0df-4686-a22f-e51a4a98b8d6/f4751bd8-e0df-4686-a22f-e51a4a98b8d6.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1187.496311] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bec09db-a681-4c11-a67f-f4bb5b0dd0f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.502490] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1187.502490] env[68674]: value = "task-3241014" [ 1187.502490] env[68674]: _type = "Task" [ 1187.502490] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.510124] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.602464] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241008, 'name': ReconfigVM_Task, 'duration_secs': 0.692014} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.602778] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 0cbfda3e-337f-41f6-add2-1dcd725b0953/0cbfda3e-337f-41f6-add2-1dcd725b0953.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.603459] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-832be4e3-37ec-4236-b7ff-f6b71946327c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.609208] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1187.609208] env[68674]: value = "task-3241015" [ 1187.609208] env[68674]: _type = "Task" [ 1187.609208] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.618019] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241015, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.657871] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241010, 'name': ReconfigVM_Task, 'duration_secs': 0.284123} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.658319] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df/1b276f5a-9e53-4ef9-892b-4e4bd0dc09df.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.659091] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1641581f-0fbe-4444-b70c-fff266668511 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.665632] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1187.665632] env[68674]: value = "task-3241016" [ 1187.665632] env[68674]: _type = "Task" [ 1187.665632] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.674294] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241016, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.714278] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ab0ce2-2af3-46fc-ad62-9418e746a0a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.721331] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Suspending the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1187.721623] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-43f6ce7e-8bf9-416d-afe3-43fb63239e39 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.728814] env[68674]: DEBUG oslo_vmware.api [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1187.728814] env[68674]: value = "task-3241017" [ 1187.728814] env[68674]: _type = "Task" [ 1187.728814] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.741174] env[68674]: DEBUG oslo_vmware.api [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241017, 'name': SuspendVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.751222] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148549} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.751524] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.751710] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1187.751890] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1187.781889] env[68674]: DEBUG oslo_vmware.api [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241013, 'name': PowerOffVM_Task, 'duration_secs': 0.328013} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.783982] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.784307] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.784680] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-004ac75c-601b-4763-95b7-a303e695ad00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.903243] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.903688] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.904109] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore file [datastore2] 
dbbf1313-6e44-45e2-8bf6-83409f06cb4b {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.904555] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9040fcf9-cab7-4958-a561-6d5c42c42f9f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.913594] env[68674]: DEBUG oslo_vmware.api [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1187.913594] env[68674]: value = "task-3241019" [ 1187.913594] env[68674]: _type = "Task" [ 1187.913594] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.929277] env[68674]: DEBUG oslo_vmware.api [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.966416] env[68674]: DEBUG nova.objects.instance [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'numa_topology' on Instance uuid 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.986922] env[68674]: DEBUG oslo_concurrency.lockutils [None req-88d8b185-0ee4-49a2-ba28-8c3cde2f552c tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "5fa43d94-64af-4cd2-9976-ca9cd994447e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.414s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.014274] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45241} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.014573] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] f4751bd8-e0df-4686-a22f-e51a4a98b8d6/f4751bd8-e0df-4686-a22f-e51a4a98b8d6.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1188.014810] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1188.015138] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b683a29-5bdf-4e0d-a530-02f8080e0e45 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.022544] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1188.022544] env[68674]: value = "task-3241020" [ 1188.022544] env[68674]: _type = "Task" [ 1188.022544] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.031257] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241020, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.121802] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241015, 'name': Rename_Task, 'duration_secs': 0.380603} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.122186] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1188.122357] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de795e4d-4fc7-49ed-9ddf-d8e1203ab604 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.129916] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1188.129916] env[68674]: value = "task-3241021" [ 1188.129916] env[68674]: _type = "Task" [ 1188.129916] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.138087] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241021, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.175623] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241016, 'name': Rename_Task, 'duration_secs': 0.157485} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.175985] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1188.176826] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d83d70b-6477-4933-9394-02419178a65d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.182881] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1188.182881] env[68674]: value = "task-3241022" [ 1188.182881] env[68674]: _type = "Task" [ 1188.182881] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.191105] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.238387] env[68674]: DEBUG oslo_vmware.api [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241017, 'name': SuspendVM_Task} progress is 70%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.424362] env[68674]: DEBUG oslo_vmware.api [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227188} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.424704] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.424901] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.425193] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.425432] env[68674]: INFO nova.compute.manager [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1188.425718] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.425975] env[68674]: DEBUG nova.compute.manager [-] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1188.426141] env[68674]: DEBUG nova.network.neutron [-] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.470226] env[68674]: INFO nova.compute.claims [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.533438] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07478} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.533808] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.534718] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8a83c3-5b5f-4b33-84c2-ea3baac3f542 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.566251] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] f4751bd8-e0df-4686-a22f-e51a4a98b8d6/f4751bd8-e0df-4686-a22f-e51a4a98b8d6.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.566672] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2d42752-0177-44f0-af74-97779bcfe2e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.599086] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1188.599086] env[68674]: value = "task-3241023" [ 1188.599086] env[68674]: _type = "Task" [ 1188.599086] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.611113] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241023, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.640043] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241021, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.696055] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241022, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.739807] env[68674]: DEBUG oslo_vmware.api [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241017, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.742528] env[68674]: DEBUG nova.compute.manager [req-eaf8639b-9713-4cde-af4e-c44db40b6f9a req-33ada836-f4c8-4963-8981-1e6e72915390 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Received event network-vif-deleted-7111fb79-ad70-4af7-9c47-0e2443a51a32 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.742740] env[68674]: INFO nova.compute.manager [req-eaf8639b-9713-4cde-af4e-c44db40b6f9a req-33ada836-f4c8-4963-8981-1e6e72915390 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Neutron deleted interface 7111fb79-ad70-4af7-9c47-0e2443a51a32; detaching it from the instance and deleting it from the info cache [ 1188.742919] env[68674]: DEBUG nova.network.neutron [req-eaf8639b-9713-4cde-af4e-c44db40b6f9a req-33ada836-f4c8-4963-8981-1e6e72915390 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.793222] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1188.793492] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1188.793651] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1188.793950] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1188.794550] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1188.794550] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1188.794787] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1188.795091] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1188.795398] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1188.795679] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1188.795956] env[68674]: DEBUG nova.virt.hardware [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1188.797253] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8871ed4-4990-47a3-86fe-d88a9c4a9939 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.806898] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c095f2ca-8bc3-4077-994e-1c241538f2c2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.821791] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:e0:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47cc2f82-8285-4168-b696-407ade0efaaf', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1188.829224] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.829780] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1188.829991] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61b866dd-7a8c-452b-a387-dd8f0ae53594 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.849163] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1188.849163] env[68674]: value = "task-3241024" [ 1188.849163] env[68674]: _type = "Task" [ 1188.849163] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.863440] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241024, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.982016] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "c2728961-9f06-4494-9c48-dd096eae8b4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.982438] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.110574] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.141035] env[68674]: DEBUG oslo_vmware.api [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241021, 'name': PowerOnVM_Task, 'duration_secs': 0.893079} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.141035] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1189.141035] env[68674]: INFO nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Took 14.66 seconds to spawn the instance on the hypervisor. 
[ 1189.141035] env[68674]: DEBUG nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.141618] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec90005-ffd1-403b-85f6-65620b123372 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.193379] env[68674]: DEBUG nova.network.neutron [-] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.194717] env[68674]: DEBUG oslo_vmware.api [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241022, 'name': PowerOnVM_Task, 'duration_secs': 0.612924} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.195153] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1189.195363] env[68674]: INFO nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Took 10.60 seconds to spawn the instance on the hypervisor. [ 1189.195538] env[68674]: DEBUG nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.196455] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dbbcf9-310c-4d7f-991d-b4db9b38cbe0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.239737] env[68674]: DEBUG oslo_vmware.api [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241017, 'name': SuspendVM_Task, 'duration_secs': 1.08658} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.240007] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Suspended the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1189.240211] env[68674]: DEBUG nova.compute.manager [None req-72f29e32-52f6-41d7-bb7a-175c9834de03 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.241032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840837ad-f556-4603-a96d-3bae41e0ec32 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.245609] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dce87b14-7bec-468a-8152-cc8ffac0f76a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.257658] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ffaf9e-23a8-4439-8843-6789d59258e0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.291367] env[68674]: DEBUG nova.compute.manager [req-eaf8639b-9713-4cde-af4e-c44db40b6f9a req-33ada836-f4c8-4963-8981-1e6e72915390 service nova] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Detach interface failed, port_id=7111fb79-ad70-4af7-9c47-0e2443a51a32, reason: Instance dbbf1313-6e44-45e2-8bf6-83409f06cb4b could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1189.359126] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241024, 'name': CreateVM_Task, 'duration_secs': 0.383182} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.359329] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1189.359939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.360124] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.360441] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1189.360681] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dca403bc-bcbf-499e-91db-367cccfb9867 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.364835] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1189.364835] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a1ae18-5ff7-afcf-4de3-56490ed7fa39" [ 1189.364835] env[68674]: _type = "Task" [ 1189.364835] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.371944] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a1ae18-5ff7-afcf-4de3-56490ed7fa39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.486288] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1189.610036] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241023, 'name': ReconfigVM_Task, 'duration_secs': 0.615315} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.613213] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfigured VM instance instance-0000007a to attach disk [datastore1] f4751bd8-e0df-4686-a22f-e51a4a98b8d6/f4751bd8-e0df-4686-a22f-e51a4a98b8d6.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.617336] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5dcf7374-b40d-4cb8-9b8c-ca5096d61cda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.622059] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1189.622059] env[68674]: value = "task-3241025" [ 1189.622059] env[68674]: _type = "Task" [ 1189.622059] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.629592] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241025, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.664567] env[68674]: INFO nova.compute.manager [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Took 22.43 seconds to build instance. [ 1189.674319] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6edddc-7b26-4dcc-9043-a0e6df9f6f43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.682087] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf851450-b340-4c17-aa2c-b02466266a51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.714882] env[68674]: INFO nova.compute.manager [-] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Took 1.29 seconds to deallocate network for instance. [ 1189.726023] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6182f228-0b86-471e-b4be-12a903e2da74 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.726317] env[68674]: INFO nova.compute.manager [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Took 19.07 seconds to build instance. 
[ 1189.737197] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de78cc7-12e5-4be2-8b3e-1a22867db2e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.757568] env[68674]: DEBUG nova.compute.provider_tree [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.876549] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52a1ae18-5ff7-afcf-4de3-56490ed7fa39, 'name': SearchDatastore_Task, 'duration_secs': 0.009534} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.876549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.876549] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1189.876549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.876549] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.876549] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.876992] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26a3cd5a-1ac2-4a55-958d-9eba0fd097dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.884953] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1189.885169] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1189.885845] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8435ce99-4886-41ef-8816-9fb5d82b6b03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.890686] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1189.890686] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520d7fd8-2187-9e1d-3b27-e33ed9d6ee8c" [ 1189.890686] env[68674]: _type = "Task" [ 1189.890686] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.898461] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520d7fd8-2187-9e1d-3b27-e33ed9d6ee8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.009463] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.131169] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241025, 'name': Rename_Task, 'duration_secs': 0.334725} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.131450] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.131695] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e55512da-9913-4c8d-8e0b-92f03e6068f0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.138349] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1190.138349] env[68674]: value = "task-3241026" [ 1190.138349] env[68674]: _type = "Task" [ 1190.138349] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.145977] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241026, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.148351] env[68674]: INFO nova.compute.manager [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Resuming [ 1190.149079] env[68674]: DEBUG nova.objects.instance [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'flavor' on Instance uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.167404] env[68674]: DEBUG oslo_concurrency.lockutils [None req-707c7dca-52c3-494b-aae6-fc48f323882c tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.936s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.228850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e0cb3143-09fa-4d87-acd9-6ae0a99881e4 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.581s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.233809] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.260028] 
env[68674]: DEBUG nova.scheduler.client.report [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.401580] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520d7fd8-2187-9e1d-3b27-e33ed9d6ee8c, 'name': SearchDatastore_Task, 'duration_secs': 0.008892} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.402370] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c47e27d-b4dc-4600-bfa3-1870f2de5cac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.408357] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1190.408357] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52867a6a-5412-bb16-cf9e-19656d463bf7" [ 1190.408357] env[68674]: _type = "Task" [ 1190.408357] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.417092] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52867a6a-5412-bb16-cf9e-19656d463bf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.654723] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241026, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.766052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.302s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.768163] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.759s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.770346] env[68674]: INFO nova.compute.claims [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1190.809626] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "0cbfda3e-337f-41f6-add2-1dcd725b0953" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.809984] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.810672] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.810672] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.810672] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.812909] env[68674]: INFO nova.compute.manager [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Terminating instance [ 1190.822645] env[68674]: INFO nova.network.neutron [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating port 0f444395-3a03-4d13-9c2e-20a2965fcb9b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1190.896632] env[68674]: DEBUG nova.compute.manager [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Received event network-changed-43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1190.896847] env[68674]: DEBUG nova.compute.manager [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Refreshing instance network info cache due to event network-changed-43bd7986-ab0b-4dd8-a224-a42cd649e0d0. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1190.897133] env[68674]: DEBUG oslo_concurrency.lockutils [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] Acquiring lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.897335] env[68674]: DEBUG oslo_concurrency.lockutils [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] Acquired lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.897530] env[68674]: DEBUG nova.network.neutron [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Refreshing network info cache for port 43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1190.918483] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52867a6a-5412-bb16-cf9e-19656d463bf7, 'name': SearchDatastore_Task, 'duration_secs': 0.0094} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.918807] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.919110] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1190.919434] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6293fb3c-4566-47bd-b2cf-802bdb0c34d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.926040] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1190.926040] env[68674]: value = "task-3241027" [ 1190.926040] env[68674]: _type = "Task" [ 1190.926040] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.934847] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.158295] env[68674]: DEBUG oslo_vmware.api [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241026, 'name': PowerOnVM_Task, 'duration_secs': 0.767906} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.160617] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1191.160929] env[68674]: INFO nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Took 7.48 seconds to spawn the instance on the hypervisor. 
[ 1191.161240] env[68674]: DEBUG nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1191.162697] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a283ec1-7e59-4de7-a276-89040f79dc9b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.317796] env[68674]: DEBUG nova.compute.manager [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1191.317981] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1191.318870] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11108ce-5a00-4e3a-a072-b14e1c93f6fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.329521] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.329804] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c204979-07f6-4c24-a564-058dac97bc33 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.336579] env[68674]: DEBUG oslo_vmware.api [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1191.336579] env[68674]: value = "task-3241028" [ 1191.336579] env[68674]: _type = "Task" [ 1191.336579] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.344586] env[68674]: DEBUG oslo_vmware.api [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.437790] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426637} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.438083] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1191.438371] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1191.438662] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a763127e-cde9-4862-ac73-c8913aa31415 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.447459] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1191.447459] env[68674]: value = "task-3241029" [ 1191.447459] env[68674]: _type = "Task" [ 1191.447459] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.459705] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.663029] env[68674]: DEBUG oslo_concurrency.lockutils [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.663371] env[68674]: DEBUG oslo_concurrency.lockutils [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquired lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.663371] env[68674]: DEBUG nova.network.neutron [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.684947] env[68674]: INFO nova.compute.manager [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Took 13.64 seconds to build instance. 
[ 1191.718384] env[68674]: DEBUG nova.network.neutron [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updated VIF entry in instance network info cache for port 43bd7986-ab0b-4dd8-a224-a42cd649e0d0. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1191.718831] env[68674]: DEBUG nova.network.neutron [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updating instance_info_cache with network_info: [{"id": "43bd7986-ab0b-4dd8-a224-a42cd649e0d0", "address": "fa:16:3e:84:76:d0", "network": {"id": "2141da47-c6b2-4270-9d0f-d999f7c26b83", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-460904531-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa34d6d90c6d46aaa2cb77259b5e0c27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43bd7986-ab", "ovs_interfaceid": "43bd7986-ab0b-4dd8-a224-a42cd649e0d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.850304] env[68674]: DEBUG oslo_vmware.api [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241028, 'name': PowerOffVM_Task, 'duration_secs': 0.195402} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.850680] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1191.850919] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1191.851197] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc1a26be-6ef9-4f9b-bcef-c8c7c0b360fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.930668] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73bf1c1-bfe3-490c-9a8b-00770820fab2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.939405] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0330ddd6-7fb4-4f37-9f6d-a95a2de0f44a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.976480] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd3936b-d4fa-46f2-8678-927e74159620 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.978921] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1191.979152] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1191.979337] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Deleting the datastore file [datastore2] 0cbfda3e-337f-41f6-add2-1dcd725b0953 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1191.979737] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b6f1d2a-ecfb-447e-b0c8-3e8be6db3c3a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.985733] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068726} completed 
successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.988553] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1191.988899] env[68674]: DEBUG oslo_vmware.api [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for the task: (returnval){ [ 1191.988899] env[68674]: value = "task-3241031" [ 1191.988899] env[68674]: _type = "Task" [ 1191.988899] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.989601] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe618c0-6afe-4c33-8d5c-8cfd4ca78cde {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.992845] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd61e4f9-f206-4a8f-b8be-c5f9c11f007d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.019615] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.029965] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7b6eb96-1799-477a-af4f-ca921f84e07f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.046434] env[68674]: DEBUG nova.compute.provider_tree [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.046434] env[68674]: DEBUG oslo_vmware.api [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241031, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.051528] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1192.051528] env[68674]: value = "task-3241032" [ 1192.051528] env[68674]: _type = "Task" [ 1192.051528] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.062999] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241032, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.189974] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9913465c-fa11-495f-8809-593874ff52b6 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.162s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.221789] env[68674]: DEBUG oslo_concurrency.lockutils [req-14c409ad-e4f5-4b69-bdcd-7621ec6ad544 req-e9002b70-78ef-43ac-b2e7-7233b4db6a9b service nova] Releasing lock "refresh_cache-1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.227795] env[68674]: DEBUG nova.compute.manager [req-8b018198-3f51-4697-9a38-af5d98010576 req-6469d290-a3ae-4f63-915d-e8c32a6f7957 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-vif-plugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1192.228031] env[68674]: DEBUG oslo_concurrency.lockutils [req-8b018198-3f51-4697-9a38-af5d98010576 req-6469d290-a3ae-4f63-915d-e8c32a6f7957 service nova] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.228287] env[68674]: DEBUG oslo_concurrency.lockutils [req-8b018198-3f51-4697-9a38-af5d98010576 req-6469d290-a3ae-4f63-915d-e8c32a6f7957 service nova] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.228438] env[68674]: DEBUG oslo_concurrency.lockutils [req-8b018198-3f51-4697-9a38-af5d98010576 req-6469d290-a3ae-4f63-915d-e8c32a6f7957 service nova] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.228619] env[68674]: DEBUG nova.compute.manager [req-8b018198-3f51-4697-9a38-af5d98010576 req-6469d290-a3ae-4f63-915d-e8c32a6f7957 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] No waiting events found dispatching network-vif-plugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1192.228790] env[68674]: WARNING nova.compute.manager [req-8b018198-3f51-4697-9a38-af5d98010576 req-6469d290-a3ae-4f63-915d-e8c32a6f7957 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received unexpected event network-vif-plugged-0f444395-3a03-4d13-9c2e-20a2965fcb9b for instance with vm_state shelved_offloaded and 
task_state spawning. [ 1192.311939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.312141] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.312320] env[68674]: DEBUG nova.network.neutron [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.501604] env[68674]: DEBUG oslo_vmware.api [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Task: {'id': task-3241031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152086} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.501860] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1192.502061] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1192.502254] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1192.502436] env[68674]: INFO nova.compute.manager [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1192.502677] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1192.502862] env[68674]: DEBUG nova.compute.manager [-] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1192.502957] env[68674]: DEBUG nova.network.neutron [-] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1192.547633] env[68674]: DEBUG nova.scheduler.client.report [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.561624] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241032, 'name': ReconfigVM_Task, 'duration_secs': 0.277796} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.567093] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Reconfigured VM instance instance-00000075 to attach disk [datastore1] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f/c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1192.567093] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f41f7fb-3afe-4ec6-84c7-b302cffe6a2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.571757] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1192.571757] env[68674]: value = "task-3241033" [ 1192.571757] env[68674]: _type = "Task" [ 1192.571757] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.579626] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241033, 'name': Rename_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.782752] env[68674]: DEBUG nova.network.neutron [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [{"id": "f751e885-e868-4e41-a9e7-de64b20c643c", "address": "fa:16:3e:cb:db:79", "network": {"id": "dd6a13cc-564e-4e30-a518-536c9c1a1c8d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2104984174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcfc3ecd6aa74705aefa88d7a95361a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf751e885-e8", "ovs_interfaceid": "f751e885-e868-4e41-a9e7-de64b20c643c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.998248] env[68674]: DEBUG nova.compute.manager [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Received event network-changed-393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1192.998680] env[68674]: DEBUG nova.compute.manager [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Refreshing instance network info cache due to event network-changed-393444bd-993f-4249-b58b-e01020f12db7. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1192.998983] env[68674]: DEBUG oslo_concurrency.lockutils [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] Acquiring lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.999292] env[68674]: DEBUG oslo_concurrency.lockutils [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] Acquired lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.999529] env[68674]: DEBUG nova.network.neutron [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Refreshing network info cache for port 393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.052268] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.052876] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1193.057499] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.824s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.057768] env[68674]: DEBUG nova.objects.instance [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'resources' on Instance uuid dbbf1313-6e44-45e2-8bf6-83409f06cb4b {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.081789] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241033, 'name': Rename_Task, 'duration_secs': 0.142706} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.082214] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1193.082457] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18e9a765-a043-4109-a017-f5bc767dd7be {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.089419] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1193.089419] env[68674]: value = "task-3241034" [ 1193.089419] env[68674]: _type = "Task" [ 1193.089419] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.097978] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241034, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.152486] env[68674]: DEBUG nova.network.neutron [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.292706] env[68674]: DEBUG oslo_concurrency.lockutils [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Releasing lock "refresh_cache-9b8aad00-0980-4752-954a-c09c9ae6f9ec" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.293982] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d555d2-7d78-4224-afa7-715348dc22f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.300676] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Resuming the VM {{(pid=68674) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1193.300917] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7a4ad6e-ee99-4375-b0ae-2f760b2fa1de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.307626] env[68674]: DEBUG oslo_vmware.api [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1193.307626] env[68674]: value = "task-3241035" [ 1193.307626] env[68674]: _type = "Task" [ 1193.307626] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.314898] env[68674]: DEBUG oslo_vmware.api [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241035, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.548236] env[68674]: DEBUG nova.network.neutron [-] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.558439] env[68674]: DEBUG nova.compute.utils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1193.559977] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1193.560167] env[68674]: DEBUG nova.network.neutron [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1193.604362] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241034, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.609483] env[68674]: DEBUG nova.policy [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28e9b76e01f463bbb375cbd9c51684f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81afe76c94de4e94b53f15af0ef95e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1193.657534] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.688594] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='350336df4b5dc278f97f601ead17e00a',container_format='bare',created_at=2025-04-03T08:16:58Z,direct_url=,disk_format='vmdk',id=62894887-4050-4053-af59-8bdae4e2d98c,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1135725583-shelved',owner='61ea6bfeb37d470a970e9c98e4827ade',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2025-04-03T08:17:16Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.688873] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.689109] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.689309] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.689462] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.689615] 
env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.689831] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.690054] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.690241] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.690425] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.690625] env[68674]: DEBUG nova.virt.hardware [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.691950] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a40bac-c53c-4e60-be28-959310eefc13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.706711] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d78446-fa2e-45c7-897f-d7147ae64892 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.722401] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:66:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f444395-3a03-4d13-9c2e-20a2965fcb9b', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.730610] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.736300] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1193.736871] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6a89a95-2750-4170-8f60-788c055b9cde {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.762940] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.762940] env[68674]: value = "task-3241036" [ 1193.762940] env[68674]: _type = "Task" [ 1193.762940] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.777022] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241036, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.792996] env[68674]: DEBUG nova.network.neutron [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updated VIF entry in instance network info cache for port 393444bd-993f-4249-b58b-e01020f12db7. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1193.792996] env[68674]: DEBUG nova.network.neutron [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating instance_info_cache with network_info: [{"id": "393444bd-993f-4249-b58b-e01020f12db7", "address": "fa:16:3e:b8:db:7c", "network": {"id": "e5c1d0d2-3458-4788-9640-4e14ad781436", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1292108367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bc5a5f88cdd441fbb0df17cab2fcecc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap393444bd-99", "ovs_interfaceid": "393444bd-993f-4249-b58b-e01020f12db7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.800323] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfd7080-0170-40a8-aa37-b65720c5caf1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.817969] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e963f3-310e-4022-8cbf-7885da5df677 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.828328] env[68674]: DEBUG oslo_vmware.api [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241035, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.856017] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ed0cda-a805-4a43-8e7e-be28a51f4dce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.866964] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60c9ef0-bdf6-41dc-8c10-4ed39f59a119 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.884960] env[68674]: DEBUG nova.compute.provider_tree [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.052426] env[68674]: INFO nova.compute.manager [-] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Took 1.55 seconds to deallocate network for instance. [ 1194.063911] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1194.108264] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241034, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.197820] env[68674]: DEBUG nova.network.neutron [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Successfully created port: 6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1194.258788] env[68674]: DEBUG nova.compute.manager [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.258999] env[68674]: DEBUG nova.compute.manager [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing instance network info cache due to event network-changed-0f444395-3a03-4d13-9c2e-20a2965fcb9b. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1194.259579] env[68674]: DEBUG oslo_concurrency.lockutils [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] Acquiring lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.259747] env[68674]: DEBUG oslo_concurrency.lockutils [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] Acquired lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.259968] env[68674]: DEBUG nova.network.neutron [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Refreshing network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1194.276207] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241036, 'name': CreateVM_Task} progress is 25%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.296533] env[68674]: DEBUG oslo_concurrency.lockutils [req-522140a9-fbcb-4432-8d35-df12fb20bff4 req-068ff9f6-85cf-4256-89b3-e5318dd2b6e9 service nova] Releasing lock "refresh_cache-f4751bd8-e0df-4686-a22f-e51a4a98b8d6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.320950] env[68674]: DEBUG oslo_vmware.api [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241035, 'name': PowerOnVM_Task, 'duration_secs': 0.552436} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.321255] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Resumed the VM {{(pid=68674) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1194.321435] env[68674]: DEBUG nova.compute.manager [None req-14854db4-adbf-47cd-83a6-5d38347423ef tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.322235] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6346f4c5-5ba6-4751-b49b-cd5a6f602f81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.388779] env[68674]: DEBUG nova.scheduler.client.report [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.563982] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.601323] env[68674]: DEBUG oslo_vmware.api [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241034, 'name': PowerOnVM_Task, 'duration_secs': 1.062607} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.601672] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1194.601918] env[68674]: DEBUG nova.compute.manager [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.602856] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2c68e0-393f-4bbe-bbf6-612b18fee733 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.773985] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241036, 'name': CreateVM_Task, 'duration_secs': 0.671099} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.774219] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1194.775039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.775185] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.775441] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1194.775702] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3afc715-140b-4c5d-a512-54e29a3471ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.780974] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1194.780974] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523c4bbf-84de-b4a1-38bc-4ef83c86c427" [ 1194.780974] env[68674]: _type = "Task" [ 1194.780974] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.788541] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523c4bbf-84de-b4a1-38bc-4ef83c86c427, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.894198] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.837s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.896455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.332s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.896455] env[68674]: DEBUG nova.objects.instance [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lazy-loading 'resources' on Instance uuid 0cbfda3e-337f-41f6-add2-1dcd725b0953 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1194.917121] env[68674]: INFO nova.scheduler.client.report [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted allocations for instance dbbf1313-6e44-45e2-8bf6-83409f06cb4b [ 1195.013201] env[68674]: DEBUG nova.network.neutron [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updated VIF entry in instance network info cache for port 0f444395-3a03-4d13-9c2e-20a2965fcb9b. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1195.013596] env[68674]: DEBUG nova.network.neutron [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [{"id": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "address": "fa:16:3e:34:66:f6", "network": {"id": "cd9a6296-fa96-4117-b8b5-3884d0d16745", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1543887384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61ea6bfeb37d470a970e9c98e4827ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f444395-3a", "ovs_interfaceid": "0f444395-3a03-4d13-9c2e-20a2965fcb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.074730] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1195.100452] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1195.100755] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1195.100965] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1195.101255] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1195.101438] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1195.101594] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1195.101838] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1195.102027] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1195.102210] env[68674]: DEBUG nova.virt.hardware [None 
req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1195.102378] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1195.102559] env[68674]: DEBUG nova.virt.hardware [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1195.103486] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b3e9e6-19f7-42a9-ab6a-672088ff436c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.111578] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6543ccf5-b805-4f03-adee-c0a1d6a0f29d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.129373] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.293086] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.293354] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Processing image 62894887-4050-4053-af59-8bdae4e2d98c {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1195.293591] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.293739] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.293937] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1195.294215] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea8d2bfe-1795-45d2-9b78-29dbc27706b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.317936] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1195.318145] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1195.318884] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53236d5d-f813-470f-9438-fa24d8c9eeda {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.324432] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1195.324432] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e89eba-7e60-3b89-63a0-2ce3de10d753" [ 1195.324432] env[68674]: _type = "Task" [ 1195.324432] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.336811] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52e89eba-7e60-3b89-63a0-2ce3de10d753, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.423542] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ac408954-6269-4230-8ff7-447acf7d0842 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "dbbf1313-6e44-45e2-8bf6-83409f06cb4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.677s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.516624] env[68674]: DEBUG oslo_concurrency.lockutils [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] Releasing lock "refresh_cache-8f183286-f908-4d05-9a61-d6b1bf10dfb9" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.516999] env[68674]: DEBUG nova.compute.manager [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received event network-vif-deleted-f90355a4-7ff9-447e-8dad-d7b710e9c578 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.517277] env[68674]: DEBUG nova.compute.manager [req-6ab7ef3d-bd82-4141-acd5-95299d56eb13 req-8189ce08-ef8d-434b-955e-5c5bd684eff4 service nova] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Received event network-vif-deleted-2d97bf60-6b9f-4ad9-91f0-5a9e0d91e019 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.522532] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae22eb95-b11c-4737-a196-3b3736136d5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.529986] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9296a93-712b-4cfa-9ceb-127c68b89e38 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.560749] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6d498d-48a7-47cf-817d-b8c1976df679 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.567925] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbb5c7b-eab3-4888-83a7-21fc9c21e72c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.580672] env[68674]: DEBUG nova.compute.provider_tree [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.728297] env[68674]: DEBUG nova.network.neutron [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Successfully updated port: 6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1195.835041] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Preparing fetch location {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1195.835343] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Fetch image to [datastore2] OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee/OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee.vmdk {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1195.835593] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Downloading stream optimized image 62894887-4050-4053-af59-8bdae4e2d98c to [datastore2] OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee/OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee.vmdk on the data store datastore2 as vApp {{(pid=68674) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1195.835760] env[68674]: DEBUG nova.virt.vmwareapi.images [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Downloading image file data 62894887-4050-4053-af59-8bdae4e2d98c to the ESX as VM named 'OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee' {{(pid=68674) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1195.905331] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1195.905331] env[68674]: value = "resgroup-9" [ 1195.905331] env[68674]: _type = "ResourcePool" [ 1195.905331] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1195.905702] env[68674]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d38dc594-5836-4585-a83a-6ebfe403a365 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.928636] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease: (returnval){ [ 1195.928636] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7b3cd-7b63-8e23-e719-a839cf4854c4" [ 1195.928636] env[68674]: _type = "HttpNfcLease" [ 1195.928636] env[68674]: } obtained for vApp import into resource pool (val){ [ 1195.928636] env[68674]: value = "resgroup-9" [ 1195.928636] env[68674]: _type = "ResourcePool" [ 1195.928636] env[68674]: }. 
{{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1195.928966] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the lease: (returnval){ [ 1195.928966] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7b3cd-7b63-8e23-e719-a839cf4854c4" [ 1195.928966] env[68674]: _type = "HttpNfcLease" [ 1195.928966] env[68674]: } to be ready. {{(pid=68674) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1195.936503] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1195.936503] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7b3cd-7b63-8e23-e719-a839cf4854c4" [ 1195.936503] env[68674]: _type = "HttpNfcLease" [ 1195.936503] env[68674]: } is initializing. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1196.083678] env[68674]: DEBUG nova.scheduler.client.report [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.231531] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-c2728961-9f06-4494-9c48-dd096eae8b4e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.231691] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-c2728961-9f06-4494-9c48-dd096eae8b4e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.231829] env[68674]: DEBUG nova.network.neutron [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.286631] env[68674]: DEBUG nova.compute.manager [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Received event network-vif-plugged-6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.286831] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] Acquiring lock "c2728961-9f06-4494-9c48-dd096eae8b4e-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.287088] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.287264] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.287428] env[68674]: DEBUG nova.compute.manager [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] No waiting events found dispatching network-vif-plugged-6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1196.287584] env[68674]: WARNING nova.compute.manager [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Received unexpected event network-vif-plugged-6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 for instance with vm_state building and task_state spawning. [ 1196.287757] env[68674]: DEBUG nova.compute.manager [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Received event network-changed-6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.287886] env[68674]: DEBUG nova.compute.manager [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Refreshing instance network info cache due to event network-changed-6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66. {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1196.288095] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] Acquiring lock "refresh_cache-c2728961-9f06-4494-9c48-dd096eae8b4e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.437618] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1196.437618] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7b3cd-7b63-8e23-e719-a839cf4854c4" [ 1196.437618] env[68674]: _type = "HttpNfcLease" [ 1196.437618] env[68674]: } is initializing. 
{{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1196.589178] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.592044] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.462s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.592370] env[68674]: DEBUG nova.objects.instance [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68674) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1196.613041] env[68674]: INFO nova.scheduler.client.report [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Deleted allocations for instance 0cbfda3e-337f-41f6-add2-1dcd725b0953 [ 1196.772996] env[68674]: DEBUG nova.network.neutron [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1196.902981] env[68674]: DEBUG nova.network.neutron [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Updating instance_info_cache with network_info: [{"id": "6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66", "address": "fa:16:3e:3b:21:bc", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fd40f2b-0d", "ovs_interfaceid": "6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.940059] env[68674]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1196.940059] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7b3cd-7b63-8e23-e719-a839cf4854c4" [ 1196.940059] env[68674]: _type = "HttpNfcLease" [ 1196.940059] env[68674]: } is ready. {{(pid=68674) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1196.940059] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1196.940059] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52d7b3cd-7b63-8e23-e719-a839cf4854c4" [ 1196.940059] env[68674]: _type = "HttpNfcLease" [ 1196.940059] env[68674]: }. {{(pid=68674) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1196.940875] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0632bb-eec3-4d1d-83bf-26f893771c13 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.947504] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5256af8a-bacc-22a6-3a91-e9367052053f/disk-0.vmdk from lease info. 
{{(pid=68674) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1196.947684] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5256af8a-bacc-22a6-3a91-e9367052053f/disk-0.vmdk. {{(pid=68674) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1197.011861] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7b9f0c64-16a2-43ee-9a48-8f624af85797 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.119646] env[68674]: DEBUG oslo_concurrency.lockutils [None req-e7d55df6-f00a-4320-aa05-8219637a3a37 tempest-ServersTestMultiNic-1657948868 tempest-ServersTestMultiNic-1657948868-project-member] Lock "0cbfda3e-337f-41f6-add2-1dcd725b0953" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.310s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.408185] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-c2728961-9f06-4494-9c48-dd096eae8b4e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.408475] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Instance network_info: |[{"id": "6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66", "address": "fa:16:3e:3b:21:bc", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fd40f2b-0d", "ovs_interfaceid": "6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1197.408901] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] Acquired lock "refresh_cache-c2728961-9f06-4494-9c48-dd096eae8b4e" {{(pid=68674) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.409037] env[68674]: DEBUG nova.network.neutron [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Refreshing network info cache for port 6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1197.410298] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:21:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1197.419467] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1197.426128] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1197.426706] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59ce8b8d-b051-46d3-a22c-34bd7eb7e4dd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.452971] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1197.452971] env[68674]: value = "task-3241039" [ 1197.452971] env[68674]: _type = "Task" [ 1197.452971] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.465034] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241039, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.603036] env[68674]: DEBUG oslo_concurrency.lockutils [None req-556356ab-73e4-4db7-bd31-eb3bb935f41b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.667063] env[68674]: DEBUG nova.network.neutron [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Updated VIF entry in instance network info cache for port 6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.667532] env[68674]: DEBUG nova.network.neutron [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Updating instance_info_cache with network_info: [{"id": "6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66", "address": "fa:16:3e:3b:21:bc", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fd40f2b-0d", "ovs_interfaceid": "6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.774914] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "23891bad-1b63-4237-9243-78954cf67d52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.775563] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "23891bad-1b63-4237-9243-78954cf67d52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.775819] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "23891bad-1b63-4237-9243-78954cf67d52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.776077] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "23891bad-1b63-4237-9243-78954cf67d52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.776253] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "23891bad-1b63-4237-9243-78954cf67d52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.778511] env[68674]: INFO nova.compute.manager [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Terminating instance [ 1197.965853] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241039, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.114216] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Completed reading data from the image iterator. {{(pid=68674) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1198.114483] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5256af8a-bacc-22a6-3a91-e9367052053f/disk-0.vmdk. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1198.115474] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74109512-7684-4842-8951-319b4377db44 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.122206] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5256af8a-bacc-22a6-3a91-e9367052053f/disk-0.vmdk is in state: ready. {{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1198.122394] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5256af8a-bacc-22a6-3a91-e9367052053f/disk-0.vmdk. 
{{(pid=68674) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1198.122631] env[68674]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-23f79d65-e6b1-400e-9879-0bca94234018 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.170665] env[68674]: DEBUG oslo_concurrency.lockutils [req-8a82d9a5-6d54-4c54-bc7b-b50086dcba4b req-9c4e3bec-9961-4306-a37b-1233329e596d service nova] Releasing lock "refresh_cache-c2728961-9f06-4494-9c48-dd096eae8b4e" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.283117] env[68674]: DEBUG nova.compute.manager [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1198.283421] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1198.284603] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0aa605-6e6a-47f8-9d32-f25d7fbd673c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.294073] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.294419] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6dbf3176-3c04-4597-b520-467c3ab04c5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.301800] env[68674]: DEBUG oslo_vmware.api [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1198.301800] env[68674]: value = "task-3241040" [ 1198.301800] env[68674]: _type = "Task" [ 1198.301800] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.314233] env[68674]: DEBUG oslo_vmware.api [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241040, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.411072] env[68674]: DEBUG oslo_vmware.rw_handles [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5256af8a-bacc-22a6-3a91-e9367052053f/disk-0.vmdk. 
{{(pid=68674) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1198.411252] env[68674]: INFO nova.virt.vmwareapi.images [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Downloaded image file data 62894887-4050-4053-af59-8bdae4e2d98c [ 1198.412492] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8ee8b1-379a-461e-936c-e9cf168ac1f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.435821] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7a470c5-b713-4864-990f-ef9ddffa770b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.465527] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241039, 'name': CreateVM_Task} progress is 99%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.527666] env[68674]: INFO nova.virt.vmwareapi.images [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] The imported VM was unregistered [ 1198.535394] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Caching image {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1198.535798] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Creating directory with path [datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.536264] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-780c07dc-5acb-4431-a118-828e40726a05 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.555195] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Created directory with path [datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.555581] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee/OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee.vmdk to [datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk. 
{{(pid=68674) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1198.555699] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6f3ac4ec-3c40-4d77-b940-7ed27da1f703 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.564575] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1198.564575] env[68674]: value = "task-3241042" [ 1198.564575] env[68674]: _type = "Task" [ 1198.564575] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.572667] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.811651] env[68674]: DEBUG oslo_vmware.api [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241040, 'name': PowerOffVM_Task, 'duration_secs': 0.350673} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.811651] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.811871] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1198.812131] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f613c98c-8d7c-455b-9c20-875dc7cc4aaa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.893197] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1198.893197] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1198.893197] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleting the datastore 
file [datastore1] 23891bad-1b63-4237-9243-78954cf67d52 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.893197] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b6f8462-14e6-4f48-9bfd-97389c7c9762 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.900728] env[68674]: DEBUG oslo_vmware.api [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for the task: (returnval){ [ 1198.900728] env[68674]: value = "task-3241044" [ 1198.900728] env[68674]: _type = "Task" [ 1198.900728] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.914345] env[68674]: DEBUG oslo_vmware.api [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.965329] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241039, 'name': CreateVM_Task, 'duration_secs': 1.482705} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.965504] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1198.966235] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.966628] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.967037] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1198.967143] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e9185c-d83f-4fb5-88ad-5207bb7fe6bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.971721] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1198.971721] env[68674]: value = 
"session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527f41b0-892e-c1c7-ebd1-c90e28ce1623" [ 1198.971721] env[68674]: _type = "Task" [ 1198.971721] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.981701] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527f41b0-892e-c1c7-ebd1-c90e28ce1623, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.076965] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.418733] env[68674]: DEBUG oslo_vmware.api [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Task: {'id': task-3241044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.439221} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.419022] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.419223] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1199.419408] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1199.419584] env[68674]: INFO nova.compute.manager [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1199.419855] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1199.420076] env[68674]: DEBUG nova.compute.manager [-] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1199.420182] env[68674]: DEBUG nova.network.neutron [-] [instance: 23891bad-1b63-4237-9243-78954cf67d52] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1199.485978] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]527f41b0-892e-c1c7-ebd1-c90e28ce1623, 'name': SearchDatastore_Task, 'duration_secs': 0.017681} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.486411] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.486664] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1199.487159] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.487325] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.487513] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.487812] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3212c448-9a04-452f-9301-84cba9ab6e41 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.511727] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.511920] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1199.512770] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645ff603-44d5-4926-8fc2-7d4bcb813a5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.521677] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1199.521677] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ac84e-903b-71eb-06fe-bcc8ee7c3b3e" [ 1199.521677] env[68674]: _type = "Task" [ 1199.521677] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.532742] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ac84e-903b-71eb-06fe-bcc8ee7c3b3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.577498] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 29%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.750033] env[68674]: DEBUG nova.compute.manager [req-d0633b0f-192e-4d0a-a6e1-eb0316e22a65 req-010e3ebc-9ad8-4387-bab0-388122e801a3 service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Received event network-vif-deleted-3660c8d4-d8be-4132-b92b-f96aa37b627b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.750244] env[68674]: INFO nova.compute.manager [req-d0633b0f-192e-4d0a-a6e1-eb0316e22a65 req-010e3ebc-9ad8-4387-bab0-388122e801a3 service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Neutron deleted interface 3660c8d4-d8be-4132-b92b-f96aa37b627b; detaching it from the instance and deleting it from the info cache [ 1199.750418] env[68674]: DEBUG nova.network.neutron [req-d0633b0f-192e-4d0a-a6e1-eb0316e22a65 req-010e3ebc-9ad8-4387-bab0-388122e801a3 service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.802641] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.803041] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.803362] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.803633] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.803837] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.808117] env[68674]: INFO nova.compute.manager [None 
req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Terminating instance [ 1200.034067] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]528ac84e-903b-71eb-06fe-bcc8ee7c3b3e, 'name': SearchDatastore_Task, 'duration_secs': 0.086934} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.035053] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69c8f430-dd77-4677-bb2a-dcd938097e1b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.044618] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1200.044618] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5200d298-0b32-ae9d-87c0-b34aeca9a23b" [ 1200.044618] env[68674]: _type = "Task" [ 1200.044618] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.055830] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5200d298-0b32-ae9d-87c0-b34aeca9a23b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.078085] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.228678] env[68674]: DEBUG nova.network.neutron [-] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.255583] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-067b1995-3bdb-450f-86f3-e341394e3e9e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.267988] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b42a8b-2695-48c2-9dfb-250cefdc8bc1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.302542] env[68674]: DEBUG nova.compute.manager [req-d0633b0f-192e-4d0a-a6e1-eb0316e22a65 req-010e3ebc-9ad8-4387-bab0-388122e801a3 service nova] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Detach interface failed, port_id=3660c8d4-d8be-4132-b92b-f96aa37b627b, reason: Instance 23891bad-1b63-4237-9243-78954cf67d52 could not be found. 
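Annotation: the `Acquiring lock "..."` / `acquired ... waited N s` / `"released" ... held N s` triplets above are emitted by oslo.concurrency's lockutils as `do_terminate_instance` enters and leaves the per-instance lock and its short-lived `<uuid>-events` companion. A hedged sketch of that usage; `lockutils.lock()` is the real context manager, while the surrounding helpers are illustrative stubs:

```python
from oslo_concurrency import lockutils

def clear_events_for_instance(instance_uuid):   # hypothetical stand-in
    print(f"cleared pending events for {instance_uuid}")

def terminate(instance_uuid):                   # hypothetical stand-in
    print(f"terminated {instance_uuid}")

def do_terminate_instance(instance_uuid):
    # Serialises all work on one instance; produces the log's
    # 'Lock "<uuid>" acquired ... waited N s' / '"released" ... held N s' pair.
    with lockutils.lock(instance_uuid):
        # Nested lock guarding the instance's pending events, matching
        # the '<uuid>-events' acquire/release entries above.
        with lockutils.lock(f"{instance_uuid}-events"):
            clear_events_for_instance(instance_uuid)
        terminate(instance_uuid)

do_terminate_instance("9b8aad00-0980-4752-954a-c09c9ae6f9ec")
```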
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1200.316157] env[68674]: DEBUG nova.compute.manager [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1200.316406] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1200.317299] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5213b80b-0378-40bf-a482-edfeb3b1285f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.327520] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.327793] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a642bf5-3258-4c73-88ab-7b7f35f26812 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.336294] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1200.336294] env[68674]: value = "task-3241045" [ 1200.336294] env[68674]: _type = "Task" [ 1200.336294] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.347861] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241045, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.398303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.398654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.398877] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.399083] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.399262] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.401977] env[68674]: INFO nova.compute.manager [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Terminating instance [ 1200.557029] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5200d298-0b32-ae9d-87c0-b34aeca9a23b, 'name': SearchDatastore_Task, 'duration_secs': 0.092442} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.557354] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.557627] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] c2728961-9f06-4494-9c48-dd096eae8b4e/c2728961-9f06-4494-9c48-dd096eae8b4e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1200.557900] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4eacde17-1248-47eb-b514-2140598bad6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.568084] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1200.568084] env[68674]: value = "task-3241046" [ 1200.568084] env[68674]: _type = "Task" [ 1200.568084] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.579503] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.582579] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.733242] env[68674]: INFO nova.compute.manager [-] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Took 1.31 seconds to deallocate network for instance. [ 1200.849979] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241045, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.906124] env[68674]: DEBUG nova.compute.manager [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1200.906451] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1200.907406] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93887e46-3982-42e7-b9bb-914df92f7383 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.917577] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.917893] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-314ba4ca-cf96-413a-aec3-2076c46067d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.926466] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1200.926466] env[68674]: value = "task-3241047" [ 1200.926466] env[68674]: _type = "Task" [ 1200.926466] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.937520] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3241047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.082774] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.086074] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.238731] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.239039] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.239367] env[68674]: DEBUG nova.objects.instance [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lazy-loading 'resources' on Instance uuid 23891bad-1b63-4237-9243-78954cf67d52 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.347409] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241045, 'name': PowerOffVM_Task, 'duration_secs': 0.526189} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.347684] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.347856] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.348122] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4f90375-226d-4032-8586-a8d70a503975 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.437360] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3241047, 'name': PowerOffVM_Task, 'duration_secs': 0.238822} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.437734] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.437916] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.438198] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfb614d7-9a13-43a6-8791-aa016a1d2f7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.440758] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.440957] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.441153] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleting the datastore file [datastore2] 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.441727] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14fc27fd-28ad-4008-8f98-70193adbadd0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.448622] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for the task: (returnval){ [ 1201.448622] env[68674]: value = "task-3241050" [ 1201.448622] env[68674]: _type = "Task" [ 1201.448622] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.456125] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.537425] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.537639] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.537823] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleting the datastore file [datastore1] ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.538113] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25bcd6c0-f73c-4b5b-b39f-fe45f14be90a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.544324] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for the task: (returnval){ [ 1201.544324] env[68674]: value = "task-3241051" [ 1201.544324] env[68674]: _type = "Task" [ 1201.544324] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.551907] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3241051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.580141] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.583018] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241046, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.870097] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5952bc-a7df-4d1a-b08f-74961262df77 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.881057] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fee320f-f1b0-41a4-9cc0-09efe55af2f2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.919727] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0240570-c71b-4bd2-a0c7-a33e962aa1fd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.928629] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201b9dc5-dc3e-42f7-82cf-6031d1dc313b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.948896] env[68674]: DEBUG nova.compute.provider_tree [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.962744] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241050, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.055628] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3241051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.082901] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241046, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.087188] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241042, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.064086} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.087477] env[68674]: INFO nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee/OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee.vmdk to [datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk. [ 1202.087706] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Cleaning up location [datastore2] OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1202.087880] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7e87df1b-b0d2-4ed6-94ea-e7ff42d7a3ee {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1202.088155] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3581b25-95ea-4488-beb7-18002b35afba {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.096976] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1202.096976] env[68674]: value = "task-3241052" [ 1202.096976] env[68674]: _type = "Task" [ 1202.096976] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.107129] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.455737] env[68674]: DEBUG nova.scheduler.client.report [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.464386] env[68674]: DEBUG oslo_vmware.api [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Task: {'id': task-3241050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.777551} completed successfully. 
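Annotation: the inventory dictionary reported just above maps directly onto Placement resource classes, whose usable capacity is conventionally `(total - reserved) * allocation_ratio`. A small worked check against the logged values (a sketch; the formula is the standard Placement capacity calculation, not something printed in the log itself):

```python
# Worked check of the usable capacity implied by the inventory above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(f"{rc}: usable capacity {capacity}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```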
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.464628] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.464807] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.464978] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.465166] env[68674]: INFO nova.compute.manager [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1202.465409] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1202.465603] env[68674]: DEBUG nova.compute.manager [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1202.465698] env[68674]: DEBUG nova.network.neutron [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.555463] env[68674]: DEBUG oslo_vmware.api [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Task: {'id': task-3241051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.631521} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.555725] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.555908] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.556174] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.556474] env[68674]: INFO nova.compute.manager [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1202.556911] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1202.557022] env[68674]: DEBUG nova.compute.manager [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1202.557160] env[68674]: DEBUG nova.network.neutron [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.582729] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241046, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.613662} completed successfully. 
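Annotation: taken together, the entries for instances 9b8aad00 and ba4bfbb4 above trace the same teardown order: power the VM off, unregister it from vCenter, delete its directory from the datastore, then deallocate its Neutron ports. A condensed, illustrative outline of that order; the step functions are stubs standing in for the real vm_util/ds_util/neutron calls named in the log:

```python
# Each step corresponds to a vCenter task the log waits on before the next.
def power_off(vm):             print(f"PowerOffVM_Task for {vm}")
def unregister(vm):            print(f"UnregisterVM for {vm}")
def delete_datastore_dir(p):   print(f"DeleteDatastoreFile_Task for {p}")
def deallocate_network(vm):    print(f"deallocate_for_instance({vm})")

def destroy_instance(vm, datastore_dir):
    """Power off -> unregister -> delete files -> free Neutron ports."""
    power_off(vm)
    unregister(vm)
    delete_datastore_dir(datastore_dir)
    deallocate_network(vm)

destroy_instance("9b8aad00-0980-4752-954a-c09c9ae6f9ec",
                 "[datastore2] 9b8aad00-0980-4752-954a-c09c9ae6f9ec")
```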
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.583087] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] c2728961-9f06-4494-9c48-dd096eae8b4e/c2728961-9f06-4494-9c48-dd096eae8b4e.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1202.583196] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1202.583424] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52ecdd71-fc1f-4ed2-8e7a-3a818e187e43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.590852] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1202.590852] env[68674]: value = "task-3241053" [ 1202.590852] env[68674]: _type = "Task" [ 1202.590852] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.599382] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241053, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.608215] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100332} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.608491] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.608660] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.608946] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk to [datastore2] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1202.609269] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fb80e2c-2467-4ad5-a1e1-5efc0737b739 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.615879] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1202.615879] env[68674]: value = "task-3241054" [ 1202.615879] env[68674]: _type = "Task" [ 1202.615879] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.623810] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241054, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.697407] env[68674]: DEBUG nova.compute.manager [req-0cc5f53b-26ed-459e-b3a8-2afe14d49e5b req-706362a3-5aa5-48a4-8fc2-8ccfc92769b9 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Received event network-vif-deleted-f751e885-e868-4e41-a9e7-de64b20c643c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1202.697686] env[68674]: INFO nova.compute.manager [req-0cc5f53b-26ed-459e-b3a8-2afe14d49e5b req-706362a3-5aa5-48a4-8fc2-8ccfc92769b9 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Neutron deleted interface f751e885-e868-4e41-a9e7-de64b20c643c; detaching it from the instance and deleting it from the info cache [ 1202.697810] env[68674]: DEBUG nova.network.neutron [req-0cc5f53b-26ed-459e-b3a8-2afe14d49e5b req-706362a3-5aa5-48a4-8fc2-8ccfc92769b9 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.960611] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.991656] env[68674]: DEBUG nova.compute.manager [req-14b784bc-bdc0-4d23-bb31-835045fa182b req-a41f4b10-f724-4cb6-8ce8-f1a4b36443bc service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Received event network-vif-deleted-7309bd2b-c077-4257-8efb-bf6e8d516ab7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1202.991882] env[68674]: INFO nova.compute.manager [req-14b784bc-bdc0-4d23-bb31-835045fa182b req-a41f4b10-f724-4cb6-8ce8-f1a4b36443bc service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Neutron deleted interface 7309bd2b-c077-4257-8efb-bf6e8d516ab7; detaching it from the instance and deleting it from the info cache [ 1202.992527] env[68674]: DEBUG nova.network.neutron [req-14b784bc-bdc0-4d23-bb31-835045fa182b req-a41f4b10-f724-4cb6-8ce8-f1a4b36443bc service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.995028] env[68674]: INFO nova.scheduler.client.report [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Deleted allocations for instance 23891bad-1b63-4237-9243-78954cf67d52 [ 1203.102245] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078585} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.102649] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1203.103376] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e42cc7-2fe4-4dac-93a3-0159367f7a37 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.126197] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] c2728961-9f06-4494-9c48-dd096eae8b4e/c2728961-9f06-4494-9c48-dd096eae8b4e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.129800] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64523e3e-684b-4b08-9eda-93a92664ed76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.150833] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241054, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.152869] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1203.152869] env[68674]: value = "task-3241055" [ 1203.152869] env[68674]: _type = "Task" [ 1203.152869] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.161588] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241055, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.180559] env[68674]: DEBUG nova.network.neutron [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.201427] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05ef71ec-a962-49f8-b470-a7b336f05c3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.211789] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db4941d-bb1d-4b87-966b-e5d2178776d1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.242800] env[68674]: DEBUG nova.compute.manager [req-0cc5f53b-26ed-459e-b3a8-2afe14d49e5b req-706362a3-5aa5-48a4-8fc2-8ccfc92769b9 service nova] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Detach interface failed, port_id=f751e885-e868-4e41-a9e7-de64b20c643c, reason: Instance 9b8aad00-0980-4752-954a-c09c9ae6f9ec could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1203.473041] env[68674]: DEBUG nova.network.neutron [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.503416] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fffab0f8-fce0-47e4-9eb0-b470e1edd956 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.509216] env[68674]: DEBUG oslo_concurrency.lockutils [None req-99982ab2-aeb0-49e1-b00d-2bdf637fc178 tempest-ServerActionsTestOtherA-986359527 tempest-ServerActionsTestOtherA-986359527-project-member] Lock "23891bad-1b63-4237-9243-78954cf67d52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.733s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.519173] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c941bf-ffab-4595-84f6-293bb429214b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.551691] env[68674]: DEBUG nova.compute.manager [req-14b784bc-bdc0-4d23-bb31-835045fa182b req-a41f4b10-f724-4cb6-8ce8-f1a4b36443bc service nova] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Detach interface failed, port_id=7309bd2b-c077-4257-8efb-bf6e8d516ab7, reason: Instance ba4bfbb4-a89b-4ab6-964e-792647fd5a89 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1203.627761] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241054, 'name': CopyVirtualDisk_Task} progress is 43%. 
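Annotation: the `network-vif-deleted` events above arrive after the instances have already been destroyed on the hypervisor, so the attempted detach fails with "Instance ... could not be found" and is logged rather than raised. A tiny sketch of that benign-failure handling; the exception type and helpers are illustrative, not the actual compute-manager code:

```python
class InstanceNotFound(Exception):
    pass

def process_vif_deleted_event(instance_uuid, port_id, detach_interface):
    # Mirror the log: a lookup failure during the race is expected and
    # only reported, never propagated.
    try:
        detach_interface(instance_uuid, port_id)
    except InstanceNotFound:
        print(f"Detach interface failed, port_id={port_id}, "
              f"reason: Instance {instance_uuid} could not be found.")

def _detach(instance_uuid, port_id):
    raise InstanceNotFound(instance_uuid)   # instance is already gone

process_vif_deleted_event("9b8aad00-0980-4752-954a-c09c9ae6f9ec",
                          "f751e885-e868-4e41-a9e7-de64b20c643c", _detach)
```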
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.664015] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.683277] env[68674]: INFO nova.compute.manager [-] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Took 1.22 seconds to deallocate network for instance. [ 1203.976166] env[68674]: INFO nova.compute.manager [-] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Took 1.42 seconds to deallocate network for instance. [ 1204.128545] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241054, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.169971] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241055, 'name': ReconfigVM_Task, 'duration_secs': 0.956001} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.170356] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Reconfigured VM instance instance-0000007b to attach disk [datastore2] c2728961-9f06-4494-9c48-dd096eae8b4e/c2728961-9f06-4494-9c48-dd096eae8b4e.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1204.171108] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee2268d8-5ec4-4d7c-ab43-3767d0dd42da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.181403] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1204.181403] env[68674]: value = "task-3241056" [ 1204.181403] env[68674]: _type = "Task" [ 1204.181403] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.191149] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.191493] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.191716] env[68674]: DEBUG nova.objects.instance [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lazy-loading 'resources' on Instance uuid 9b8aad00-0980-4752-954a-c09c9ae6f9ec {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.195466] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241056, 'name': Rename_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.482885] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.628513] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241054, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.691532] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241056, 'name': Rename_Task, 'duration_secs': 0.403628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.695030] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1204.695030] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69d21894-61f4-441f-80b8-059cf16fde0e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.704556] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1204.704556] env[68674]: value = "task-3241057" [ 1204.704556] env[68674]: _type = "Task" [ 1204.704556] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.714941] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241057, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.824084] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacfdad8-1266-4219-a618-c7273ff9b03e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.833032] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94981869-cf08-43d0-9788-07534aaaf304 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.866478] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37636c8-b8cf-4e4d-ac8e-e8c553e7847b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.875742] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be59402-856b-4d77-a4e5-743d584775e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.890956] env[68674]: DEBUG nova.compute.provider_tree [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.128932] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241054, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.405777} completed successfully. 
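Annotation: in parallel with the teardowns, the log traces the cached-image boot path for instance c2728961: copy the cached vmdk out of devstack-image-cache_base into the instance directory (CopyVirtualDisk_Task), extend the root disk (ExtendVirtualDisk_Task), attach it via ReconfigVM_Task, rename the VM, then power it on (PowerOnVM_Task). A condensed, illustrative outline of that order; the helper is a stub, not the real vmops code:

```python
# Each entry stands in for a vCenter task the driver waits on in turn.
STEPS = [
    ("CopyVirtualDisk_Task",   "copy cached vmdk into the instance directory"),
    ("ExtendVirtualDisk_Task", "grow the root disk to the flavor size"),
    ("ReconfigVM_Task",        "attach the vmdk to the VM"),
    ("Rename_Task",            "rename the VM"),
    ("PowerOnVM_Task",         "power the VM on"),
]

def boot_from_image_cache():
    for task_name, action in STEPS:
        # In the real driver each step blocks in wait_for_task() until the
        # task reports 'completed successfully', as the entries above show.
        print(f"{task_name}: {action}")

boot_from_image_cache()
```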
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.130835] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/62894887-4050-4053-af59-8bdae4e2d98c/62894887-4050-4053-af59-8bdae4e2d98c.vmdk to [datastore2] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1205.130835] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef9380e-cd69-4e9a-9886-d890c3473e29 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.153329] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.153645] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4d6bfc3-4b5a-4ad3-aebb-f03646c5c839 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.173877] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1205.173877] env[68674]: value = "task-3241058" [ 1205.173877] env[68674]: _type = "Task" [ 1205.173877] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.183836] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241058, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.214051] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241057, 'name': PowerOnVM_Task} progress is 66%. 
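The CopyVirtualDisk_Task record above copies the cached image VMDK (devstack-image-cache_base/&lt;image-id&gt;/&lt;image-id&gt;.vmdk) into a folder named after the instance UUID on the same datastore. A small helper mirroring that datastore-path convention; the path layout is taken from the log, the helper itself is only illustrative.

```python
def ds_paths(datastore, image_id, instance_uuid,
             cache_dir='devstack-image-cache_base'):
    # Source: cached image VMDK; destination: per-instance VMDK, both on the
    # same datastore, matching the copy line above.
    src = f'[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk'
    dst = f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'
    return src, dst


src, dst = ds_paths('datastore2',
                    '62894887-4050-4053-af59-8bdae4e2d98c',
                    '8f183286-f908-4d05-9a61-d6b1bf10dfb9')
```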
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.395472] env[68674]: DEBUG nova.scheduler.client.report [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.685600] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241058, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.715064] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241057, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.901321] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.710s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.903690] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.421s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.903900] env[68674]: DEBUG nova.objects.instance [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lazy-loading 'resources' on Instance uuid ba4bfbb4-a89b-4ab6-964e-792647fd5a89 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.928940] env[68674]: INFO nova.scheduler.client.report [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Deleted allocations for instance 9b8aad00-0980-4752-954a-c09c9ae6f9ec [ 1206.186672] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241058, 'name': ReconfigVM_Task, 'duration_secs': 0.751855} completed successfully. 
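The inventory payload repeated in these report-client lines is what placement uses to compute schedulable capacity: (total - reserved) * allocation_ratio per resource class. A worked example with the exact numbers logged for provider ade3f042-7427-494b-9654-0b65e074850c.

```python
# Inventory values copied from the scheduler report lines above.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def capacity(inventory):
    # Schedulable capacity per resource class as placement computes it.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inventory.items()}


print(capacity(INVENTORY))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```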
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.187011] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 8f183286-f908-4d05-9a61-d6b1bf10dfb9/8f183286-f908-4d05-9a61-d6b1bf10dfb9.vmdk or device None with type streamOptimized {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.189655] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-259e3bb6-7be1-40ab-bbde-9b158c2d0518 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.196534] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1206.196534] env[68674]: value = "task-3241059" [ 1206.196534] env[68674]: _type = "Task" [ 1206.196534] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.204884] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241059, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.213259] env[68674]: DEBUG oslo_vmware.api [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241057, 'name': PowerOnVM_Task, 'duration_secs': 1.345856} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.213509] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1206.213711] env[68674]: INFO nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Took 11.14 seconds to spawn the instance on the hypervisor. 
[ 1206.213919] env[68674]: DEBUG nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1206.214693] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d553a7a-7b77-4974-9715-9e2765a6a11a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.442619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc29ae6e-bd43-4b01-8d34-2b3a787d47e2 tempest-ServersNegativeTestJSON-1761443692 tempest-ServersNegativeTestJSON-1761443692-project-member] Lock "9b8aad00-0980-4752-954a-c09c9ae6f9ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.639s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.516570] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29513526-11bd-4d7d-9d53-a2fb629675de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.523972] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033f42e4-8044-4991-91c1-670645ecba87 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.557903] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d6ca66-9fbd-425e-b3ed-02b392a7df0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.568355] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c781c8c6-7b1e-4d6f-82f8-fc7de3bf2835 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.579598] env[68674]: DEBUG nova.compute.provider_tree [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.707070] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241059, 'name': Rename_Task, 'duration_secs': 0.391813} completed successfully. 
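The "Checking state" records followed by PropertyCollector.RetrievePropertiesEx calls are the driver reading runtime.powerState for the instance's VM. A minimal sketch of that single-property lookup using oslo.vmware's vim_util; session is an established API session and vm_ref is an already-resolved VM managed object reference.

```python
from oslo_vmware import vim_util


def get_power_state(session, vm_ref):
    # Reads one property through the PropertyCollector; the result is one of
    # 'poweredOn', 'poweredOff' or 'suspended'.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')
```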
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.707557] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1206.707809] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87a57c35-f549-412e-a231-1fea00a4364e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.713342] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1206.713342] env[68674]: value = "task-3241060" [ 1206.713342] env[68674]: _type = "Task" [ 1206.713342] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.721507] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241060, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.732348] env[68674]: INFO nova.compute.manager [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Took 16.74 seconds to build instance. [ 1207.017630] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b5b7f1-9682-4019-a9cd-d1006d4d5f23 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.024848] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Suspending the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1207.025110] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-47e70607-418f-4d30-a911-e3d1a173b6d5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.031584] env[68674]: DEBUG oslo_vmware.api [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1207.031584] env[68674]: value = "task-3241061" [ 1207.031584] env[68674]: _type = "Task" [ 1207.031584] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.038893] env[68674]: DEBUG oslo_vmware.api [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241061, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.082679] env[68674]: DEBUG nova.scheduler.client.report [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.223724] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241060, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.234873] env[68674]: DEBUG oslo_concurrency.lockutils [None req-dda1d8f0-aef0-46d3-99b7-7b80250240d7 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.252s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.547147] env[68674]: DEBUG oslo_vmware.api [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241061, 'name': SuspendVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.591188] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.627199] env[68674]: INFO nova.scheduler.client.report [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Deleted allocations for instance ba4bfbb4-a89b-4ab6-964e-792647fd5a89 [ 1207.725547] env[68674]: DEBUG oslo_vmware.api [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241060, 'name': PowerOnVM_Task, 'duration_secs': 0.919011} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.725845] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.846049] env[68674]: DEBUG nova.compute.manager [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1207.846986] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c377de-1496-451e-a17b-a67f5ae35404 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.047141] env[68674]: DEBUG oslo_vmware.api [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241061, 'name': SuspendVM_Task, 'duration_secs': 0.664868} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.047422] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Suspended the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1208.047601] env[68674]: DEBUG nova.compute.manager [None req-727a188a-d79b-4e4b-a9a1-063710bc3be4 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1208.048434] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267856ed-97f3-4eb3-9daa-97ab9cfd746a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.138025] env[68674]: DEBUG oslo_concurrency.lockutils [None req-40beb7e8-2770-471e-a58e-b58b84dea13e tempest-AttachVolumeShelveTestJSON-1250685407 tempest-AttachVolumeShelveTestJSON-1250685407-project-member] Lock "ba4bfbb4-a89b-4ab6-964e-792647fd5a89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.739s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.366831] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f7624f4e-5902-441e-a5f4-1a59b68fc316 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.924s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.211646] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 
tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "531f935e-27e8-4b0d-b549-9693be7bff93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.211882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "531f935e-27e8-4b0d-b549-9693be7bff93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.632066] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.632698] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.632698] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.632825] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.632910] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.635879] env[68674]: INFO nova.compute.manager [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Terminating instance [ 1209.715069] env[68674]: DEBUG nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 
tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1209.805538] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "c2728961-9f06-4494-9c48-dd096eae8b4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.805812] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.806036] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "c2728961-9f06-4494-9c48-dd096eae8b4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.806261] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.806439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.808809] env[68674]: INFO nova.compute.manager [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Terminating instance [ 1210.139590] env[68674]: DEBUG nova.compute.manager [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Start destroying the instance on the hypervisor. 
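The build and terminate paths above also serialize per instance: each operation takes a lock named after the instance UUID, and event bookkeeping takes a separate "&lt;uuid&gt;-events" lock. A minimal sketch of that naming pattern with oslo.concurrency; the function body is illustrative, not nova's code.

```python
from oslo_concurrency import lockutils


def terminate_sketch(instance_uuid, do_terminate):
    # Held for the whole operation, so a concurrent build/unshelve/delete of
    # the same instance blocks (the "waited N.NNNs" lines), while operations
    # on other instances proceed in parallel.
    with lockutils.lock(instance_uuid):
        # Short-lived companion lock used when clearing pending external
        # events for this instance.
        with lockutils.lock(f'{instance_uuid}-events'):
            pass
        return do_terminate(instance_uuid)
```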
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1210.139839] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1210.140763] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc87678-07a6-4223-8d28-83de6dbb3ce8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.148707] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1210.148965] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06c2f56d-30fa-4ed7-bd45-87b38eb01031 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.156492] env[68674]: DEBUG oslo_vmware.api [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1210.156492] env[68674]: value = "task-3241062" [ 1210.156492] env[68674]: _type = "Task" [ 1210.156492] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.164967] env[68674]: DEBUG oslo_vmware.api [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.239314] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.239636] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.241240] env[68674]: INFO nova.compute.claims [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1210.313114] env[68674]: DEBUG nova.compute.manager [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1210.313455] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1210.316576] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a82384-ec4c-4872-8736-0d1a780ed244 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.324980] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.325398] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-585aa909-8935-42e6-a599-f339c43c3b47 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.402057] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1210.402533] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Deleting contents of the VM from datastore datastore2 {{(pid=68674) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1210.402533] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleting the datastore file [datastore2] c2728961-9f06-4494-9c48-dd096eae8b4e {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.402713] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-404699b5-28ed-47e8-864a-2fada6389cfe {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.410329] env[68674]: DEBUG oslo_vmware.api [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1210.410329] env[68674]: value = "task-3241065" [ 1210.410329] env[68674]: _type = "Task" [ 1210.410329] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.418757] env[68674]: DEBUG oslo_vmware.api [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.666903] env[68674]: DEBUG oslo_vmware.api [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241062, 'name': PowerOffVM_Task, 'duration_secs': 0.173027} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.667291] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.667444] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.668625] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07912b7a-5fdb-4d5f-87f5-5780c53a253c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.737037] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1210.737266] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1210.737446] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleting the datastore file [datastore2] 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.737714] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-242bd4ab-8f3a-4af4-8721-1ffc3fc8594c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.744422] env[68674]: DEBUG oslo_vmware.api [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for the task: (returnval){ [ 1210.744422] env[68674]: value = "task-3241067" [ 1210.744422] env[68674]: _type = "Task" [ 1210.744422] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.754309] env[68674]: DEBUG oslo_vmware.api [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
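Destroying an instance in these lines is a three-step vSphere sequence: power the VM off, unregister it from the vCenter inventory, then delete its folder from the datastore with FileManager.DeleteDatastoreFile_Task. A hedged sketch of the same sequence through an oslo.vmware session; vm_ref, ds_path and dc_ref are assumed to be already resolved, and this is not nova's actual implementation.

```python
def destroy_sketch(session, vm_ref, ds_path, dc_ref):
    # 1. Power the VM off (a task, so it is polled to completion).
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # 2. UnregisterVM is not a task: it removes the VM from the vCenter
    #    inventory but leaves its files on the datastore.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 3. Delete the instance directory from the datastore.
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                           file_manager, name=ds_path, datacenter=dc_ref))
```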
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.919365] env[68674]: DEBUG oslo_vmware.api [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186573} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.919613] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.919801] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.919995] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.920186] env[68674]: INFO nova.compute.manager [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1210.920431] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
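The "Waiting for function ... _deallocate_network_with_retries to return" record is emitted from oslo.service's loopingcall module, whose RetryDecorator re-invokes the wrapped network-deallocation call when selected exceptions are raised. A minimal sketch of that decorator; the exception type and retry/sleep values here are illustrative, not nova's configuration.

```python
from oslo_service import loopingcall


class TransientNeutronError(Exception):
    """Illustrative stand-in for the exceptions nova retries on."""


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientNeutronError,))
def deallocate_network_sketch(network_api, context, instance):
    # Re-run with an increasing sleep while one of the listed exceptions is
    # raised; the decorator re-raises once the retry budget is exhausted.
    return network_api.deallocate_for_instance(context, instance)
```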
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1210.920623] env[68674]: DEBUG nova.compute.manager [-] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1210.920717] env[68674]: DEBUG nova.network.neutron [-] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1211.154218] env[68674]: DEBUG nova.compute.manager [req-9cef477b-3835-4a20-a082-2930e08b3a40 req-69029ca6-93cc-4114-b32d-d01066cdf007 service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Received event network-vif-deleted-6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.154447] env[68674]: INFO nova.compute.manager [req-9cef477b-3835-4a20-a082-2930e08b3a40 req-69029ca6-93cc-4114-b32d-d01066cdf007 service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Neutron deleted interface 6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66; detaching it from the instance and deleting it from the info cache [ 1211.154625] env[68674]: DEBUG nova.network.neutron [req-9cef477b-3835-4a20-a082-2930e08b3a40 req-69029ca6-93cc-4114-b32d-d01066cdf007 service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.256542] env[68674]: DEBUG oslo_vmware.api [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Task: {'id': task-3241067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150949} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.256769] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.257287] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1211.257287] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1211.257287] env[68674]: INFO nova.compute.manager [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1211.257526] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.257709] env[68674]: DEBUG nova.compute.manager [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1211.257804] env[68674]: DEBUG nova.network.neutron [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1211.346960] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b073eaf-7dbd-481e-abd5-80a0f89bf698 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.355552] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01dd56c-5cc6-40f6-a87a-4d1845259843 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.386436] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ef51b7-80bc-45a4-b298-36ff68b5dd92 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.393054] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0ba8e8-b99d-4c82-8215-bfd6e33b634f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.407065] env[68674]: DEBUG nova.compute.provider_tree [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.640221] env[68674]: DEBUG nova.network.neutron [-] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.658087] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2dd30a62-847a-46d0-a4a6-6704bb11eeb3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.667447] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c29b91-4864-450a-8295-1fca60937b71 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.696634] env[68674]: DEBUG nova.compute.manager [req-9cef477b-3835-4a20-a082-2930e08b3a40 req-69029ca6-93cc-4114-b32d-d01066cdf007 service nova] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Detach interface failed, port_id=6fd40f2b-0d7b-41d0-bb6e-a3b878a8ee66, reason: Instance c2728961-9f06-4494-9c48-dd096eae8b4e could not be found. 
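The SearchIndex.FindAllByUuid invocation above looks the VM up by the nova instance UUID; because the VM has already been unregistered, the lookup comes back empty and the interface detach is logged as "could not be found". A hedged sketch of that lookup; session is an oslo.vmware API session and vm_uuid is the instance UUID.

```python
def find_vm_by_uuid(session, vm_uuid):
    # FindAllByUuid with vmSearch/instanceUuid=True searches virtual machines
    # by their instance UUID; it returns an empty result once the VM is gone.
    search_index = session.vim.service_content.searchIndex
    results = session.invoke_api(session.vim, 'FindAllByUuid', search_index,
                                 uuid=vm_uuid, vmSearch=True,
                                 instanceUuid=True)
    return results[0] if results else None
```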
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1211.912026] env[68674]: DEBUG nova.scheduler.client.report [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1212.142594] env[68674]: INFO nova.compute.manager [-] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Took 1.22 seconds to deallocate network for instance. [ 1212.180234] env[68674]: DEBUG nova.network.neutron [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.414023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.414676] env[68674]: DEBUG nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1212.649461] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.649856] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.650135] env[68674]: DEBUG nova.objects.instance [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'resources' on Instance uuid c2728961-9f06-4494-9c48-dd096eae8b4e {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.682786] env[68674]: INFO nova.compute.manager [-] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Took 1.42 seconds to deallocate network for instance. 
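"Start building networks asynchronously for instance" above means port allocation is kicked off on a green thread while block device mappings are prepared, and the result is joined before the instance is spawned on the hypervisor. A minimal sketch of that overlap with eventlet; allocate_fn and bdm_fn stand in for the real allocation and block-device steps.

```python
import eventlet


def build_resources_sketch(allocate_fn, bdm_fn, context, instance):
    # Network allocation runs in the background on a green thread.
    network_future = eventlet.spawn(allocate_fn, context, instance)
    # Block device mappings are prepared while the allocation is in flight.
    bdms = bdm_fn(context, instance)
    # Join the allocation before spawning; exceptions re-raise here.
    network_info = network_future.wait()
    return network_info, bdms
```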
[ 1212.919339] env[68674]: DEBUG nova.compute.utils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1212.920725] env[68674]: DEBUG nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1212.920970] env[68674]: DEBUG nova.network.neutron [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1212.968634] env[68674]: DEBUG nova.policy [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73a90c2c27924299b1646ef28c3ea2ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57064972d31a4daab2afe07c07e97cd5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1213.183178] env[68674]: DEBUG nova.compute.manager [req-b019f5f4-15b9-4e37-961c-74ceea64ac41 req-6848f762-a3f1-4f3a-b526-059451e1b71f service nova] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Received event network-vif-deleted-0f444395-3a03-4d13-9c2e-20a2965fcb9b {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1213.188470] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.212694] env[68674]: DEBUG nova.network.neutron [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Successfully created port: f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1213.264682] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1922c76b-7928-4590-aec2-be46440dbda5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.272591] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799a363f-5661-46c1-b0ad-3a063ec71cf4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.302816] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82676ba3-5410-4584-af2f-1b19b735ed00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.309781] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf951cf4-d037-439f-8489-7ad0631c8a5a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.322712] env[68674]: DEBUG nova.compute.provider_tree [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.424501] env[68674]: DEBUG nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1213.828913] env[68674]: DEBUG nova.scheduler.client.report [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1214.333748] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.337134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.148s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.337426] env[68674]: DEBUG nova.objects.instance [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lazy-loading 'resources' on Instance uuid 8f183286-f908-4d05-9a61-d6b1bf10dfb9 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.367529] env[68674]: INFO nova.scheduler.client.report [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocations for instance c2728961-9f06-4494-9c48-dd096eae8b4e [ 1214.433816] env[68674]: DEBUG nova.compute.manager [None 
req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Start spawning the instance on the hypervisor. {{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1214.466996] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1214.467337] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1214.467575] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.467819] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1214.468014] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.468236] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1214.468508] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1214.468733] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 
tempest-ServerPasswordTestJSON-386322776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1214.469033] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1214.469314] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1214.469571] env[68674]: DEBUG nova.virt.hardware [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1214.470524] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f50298-73dc-40d6-afc4-a9ede67535f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.479622] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea74e39-94e5-4110-9946-dc3093a8305c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.878059] env[68674]: DEBUG oslo_concurrency.lockutils [None req-16d00593-c889-4669-9ef5-ad2d7ca31690 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "c2728961-9f06-4494-9c48-dd096eae8b4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.072s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.946283] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9bc5f7-483a-4260-a31d-4f2dfc20bb43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.954196] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da81574-addc-46b4-9cf6-e78e688c08c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.989132] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af5dbd3-231d-44a3-a9b5-603135d785b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.999587] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53545ba9-a391-4124-ab18-f15f40cbccc9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.012178] env[68674]: DEBUG nova.compute.provider_tree [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed in ProviderTree for 
provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.060770] env[68674]: DEBUG nova.network.neutron [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Successfully updated port: f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1215.216600] env[68674]: DEBUG nova.compute.manager [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Received event network-vif-plugged-f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.216600] env[68674]: DEBUG oslo_concurrency.lockutils [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] Acquiring lock "531f935e-27e8-4b0d-b549-9693be7bff93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.216600] env[68674]: DEBUG oslo_concurrency.lockutils [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] Lock "531f935e-27e8-4b0d-b549-9693be7bff93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.216600] env[68674]: DEBUG oslo_concurrency.lockutils [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] Lock "531f935e-27e8-4b0d-b549-9693be7bff93-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.216600] env[68674]: DEBUG nova.compute.manager [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] No waiting events found dispatching network-vif-plugged-f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1215.216600] env[68674]: WARNING nova.compute.manager [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Received unexpected event network-vif-plugged-f2acd622-62cd-45c3-b22b-33d482cc9598 for instance with vm_state building and task_state spawning. [ 1215.216600] env[68674]: DEBUG nova.compute.manager [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Received event network-changed-f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.216600] env[68674]: DEBUG nova.compute.manager [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Refreshing instance network info cache due to event network-changed-f2acd622-62cd-45c3-b22b-33d482cc9598. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1215.216600] env[68674]: DEBUG oslo_concurrency.lockutils [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] Acquiring lock "refresh_cache-531f935e-27e8-4b0d-b549-9693be7bff93" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.217378] env[68674]: DEBUG oslo_concurrency.lockutils [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] Acquired lock "refresh_cache-531f935e-27e8-4b0d-b549-9693be7bff93" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.217742] env[68674]: DEBUG nova.network.neutron [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Refreshing network info cache for port f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.515260] env[68674]: DEBUG nova.scheduler.client.report [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.563147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "refresh_cache-531f935e-27e8-4b0d-b549-9693be7bff93" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.752083] env[68674]: DEBUG nova.network.neutron [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Instance cache missing network info. 
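The network-vif-plugged / network-changed events handled above arrive through Nova's os-server-external-events API, which Neutron's Nova notifier calls when a port's status changes. Below is a minimal sketch of the request body for the plugged event seen in this log; authentication, the compute endpoint URL, and the HTTP client are omitted and would come from the deployment's service credentials.

```python
# Sketch of the external event that produces the "Received event
# network-vif-plugged-..." lines above: Neutron notifies Nova through the
# os-server-external-events API. Auth/session handling is omitted here.
import json

payload = {
    "events": [{
        "name": "network-vif-plugged",
        "server_uuid": "531f935e-27e8-4b0d-b549-9693be7bff93",
        "tag": "f2acd622-62cd-45c3-b22b-33d482cc9598",
        "status": "completed",
    }]
}
# POST {compute_endpoint}/os-server-external-events
print(json.dumps(payload, indent=2))
```

Because the vif-plugged event for port f2acd622-62cd-45c3-b22b-33d482cc9598 arrives while the instance is still building and no waiter is registered, the manager logs it as unexpected, while the following network-changed event triggers the network info cache refresh recorded above.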
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1215.825032] env[68674]: DEBUG nova.network.neutron [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.929937] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.930253] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.020882] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.043871] env[68674]: INFO nova.scheduler.client.report [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Deleted allocations for instance 8f183286-f908-4d05-9a61-d6b1bf10dfb9 [ 1216.331189] env[68674]: DEBUG oslo_concurrency.lockutils [req-96a6f8e3-f7c5-42ee-b684-b79c60995b8e req-a66fa926-8fc4-4725-8ae1-1d2854394800 service nova] Releasing lock "refresh_cache-531f935e-27e8-4b0d-b549-9693be7bff93" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.331543] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquired lock "refresh_cache-531f935e-27e8-4b0d-b549-9693be7bff93" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.331709] env[68674]: DEBUG nova.network.neutron [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1216.432452] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1216.552717] env[68674]: DEBUG oslo_concurrency.lockutils [None req-925ff2fc-f049-4006-a58e-b218601a33f6 tempest-ServerActionsTestOtherB-589403679 tempest-ServerActionsTestOtherB-589403679-project-member] Lock "8f183286-f908-4d05-9a61-d6b1bf10dfb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.920s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.862104] env[68674]: DEBUG nova.network.neutron [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Instance cache missing network info. {{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1216.953047] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.953345] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.954897] env[68674]: INFO nova.compute.claims [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1217.001039] env[68674]: DEBUG nova.network.neutron [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Updating instance_info_cache with network_info: [{"id": "f2acd622-62cd-45c3-b22b-33d482cc9598", "address": "fa:16:3e:63:be:b1", "network": {"id": "4e37aa78-13ee-4b6e-bf37-6a92559b2fe8", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-49897432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57064972d31a4daab2afe07c07e97cd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2acd622-62", "ovs_interfaceid": "f2acd622-62cd-45c3-b22b-33d482cc9598", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.504049] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Releasing lock "refresh_cache-531f935e-27e8-4b0d-b549-9693be7bff93" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.504199] env[68674]: DEBUG nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Instance network_info: |[{"id": "f2acd622-62cd-45c3-b22b-33d482cc9598", "address": "fa:16:3e:63:be:b1", "network": {"id": "4e37aa78-13ee-4b6e-bf37-6a92559b2fe8", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-49897432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57064972d31a4daab2afe07c07e97cd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2acd622-62", "ovs_interfaceid": "f2acd622-62cd-45c3-b22b-33d482cc9598", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1217.504638] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:be:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2acd622-62cd-45c3-b22b-33d482cc9598', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1217.512302] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Creating folder: Project (57064972d31a4daab2afe07c07e97cd5). Parent ref: group-v647377. 
{{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1217.512580] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1081c916-2e96-44d7-a4f1-afabb8dca033 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.524337] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Created folder: Project (57064972d31a4daab2afe07c07e97cd5) in parent group-v647377. [ 1217.524525] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Creating folder: Instances. Parent ref: group-v647720. {{(pid=68674) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1217.524757] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78d58cbf-b116-461d-90ec-0de881232027 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.534036] env[68674]: INFO nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Created folder: Instances in parent group-v647720. [ 1217.534292] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1217.534483] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1217.534972] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f3777d8-4a44-4556-bd83-e6d334cdc921 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.554642] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1217.554642] env[68674]: value = "task-3241071" [ 1217.554642] env[68674]: _type = "Task" [ 1217.554642] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.561817] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241071, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.050991] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33e92f8-7e42-4c30-89cd-9e3c677c22b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.061240] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c23ac30-6450-4d87-9877-bd7a5137466b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.066884] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241071, 'name': CreateVM_Task, 'duration_secs': 0.321349} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.067419] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1218.068116] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.068288] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.068619] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1218.068873] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b215c9-c255-4a08-b7bd-cf9d3f143fd3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.094543] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df70c0a-ac17-4e97-8ed2-fe0eff627176 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.098372] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1218.098372] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cb6d89-9049-cddf-ddd6-6d77e46ebec1" [ 1218.098372] env[68674]: _type = "Task" [ 1218.098372] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.105184] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e21eeb-b200-4ee2-a347-1879b2adf239 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.111860] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52cb6d89-9049-cddf-ddd6-6d77e46ebec1, 'name': SearchDatastore_Task, 'duration_secs': 0.009366} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.112406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.112662] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1218.112869] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.113026] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.113211] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1218.113436] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6a01030-cfca-4291-b4c1-311817aad6a5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.122487] env[68674]: DEBUG nova.compute.provider_tree [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.129405] env[68674]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1218.129405] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1218.130528] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-983fdf16-1568-4ad4-8bb7-a0379dae893b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.135892] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1218.135892] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5297fc60-bf21-a382-d25e-90e32a304fd5" [ 1218.135892] env[68674]: _type = "Task" [ 1218.135892] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.143795] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5297fc60-bf21-a382-d25e-90e32a304fd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.625914] env[68674]: DEBUG nova.scheduler.client.report [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1218.648252] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5297fc60-bf21-a382-d25e-90e32a304fd5, 'name': SearchDatastore_Task, 'duration_secs': 0.009489} completed successfully. 
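The repeated "Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c" reports above carry the provider's full inventory. As a quick way to read those numbers: Placement's usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. The sketch below just reproduces that arithmetic for the figures in this log.

```python
# Minimal sketch: how the inventory figures logged above translate into
# schedulable capacity. Usable capacity per resource class is
# (total - reserved) * allocation_ratio; max_unit caps a single allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 120},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, per-instance cap={inv['max_unit']}")
# VCPU: capacity=192, per-instance cap=16
# MEMORY_MB: capacity=196078, per-instance cap=65530
# DISK_GB: capacity=400, per-instance cap=120
```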
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.649010] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be619ab2-d493-4048-a59b-7a73a128ed08 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.653826] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1218.653826] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bdc7ff-e633-6298-8cd5-7d480ebc9174" [ 1218.653826] env[68674]: _type = "Task" [ 1218.653826] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.661225] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bdc7ff-e633-6298-8cd5-7d480ebc9174, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.131759] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.178s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.131759] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1219.164617] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bdc7ff-e633-6298-8cd5-7d480ebc9174, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.165191] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.165495] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 531f935e-27e8-4b0d-b549-9693be7bff93/531f935e-27e8-4b0d-b549-9693be7bff93.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1219.166370] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fb9a2bd-92de-4ddf-bbfd-8f1672f105d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.174077] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1219.174077] env[68674]: value = "task-3241072" [ 1219.174077] env[68674]: _type = "Task" [ 1219.174077] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.181820] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241072, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.636604] env[68674]: DEBUG nova.compute.utils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1219.638189] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1219.638409] env[68674]: DEBUG nova.network.neutron [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1219.675915] env[68674]: DEBUG nova.policy [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd28e9b76e01f463bbb375cbd9c51684f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81afe76c94de4e94b53f15af0ef95e66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1219.686099] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241072, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466599} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.686424] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore1] 531f935e-27e8-4b0d-b549-9693be7bff93/531f935e-27e8-4b0d-b549-9693be7bff93.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1219.686653] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1219.686892] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6824c10c-d61d-42f3-a137-a8d21f5e51c0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.692524] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1219.692524] env[68674]: value = "task-3241073" [ 1219.692524] env[68674]: _type = "Task" [ 1219.692524] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.699552] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241073, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.906600] env[68674]: DEBUG nova.network.neutron [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Successfully created port: fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1220.141517] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1220.202498] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059355} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.202789] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1220.203573] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba41cff2-21de-4aa4-a7c5-c13e4d411a2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.225601] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 531f935e-27e8-4b0d-b549-9693be7bff93/531f935e-27e8-4b0d-b549-9693be7bff93.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1220.226095] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-621e53a6-9efd-4167-82c6-26dd27e9423e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.244494] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1220.244494] env[68674]: value = "task-3241074" [ 1220.244494] env[68674]: _type = "Task" [ 1220.244494] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.252061] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241074, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.756554] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241074, 'name': ReconfigVM_Task, 'duration_secs': 0.331288} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.756828] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 531f935e-27e8-4b0d-b549-9693be7bff93/531f935e-27e8-4b0d-b549-9693be7bff93.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1220.757573] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05bc63dd-3a9c-4782-a94a-1756b1b9001d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.763712] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1220.763712] env[68674]: value = "task-3241075" [ 1220.763712] env[68674]: _type = "Task" [ 1220.763712] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.771464] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241075, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.151242] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1221.183531] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1221.183810] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.184023] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1221.184254] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.184417] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1221.184597] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1221.185319] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1221.185603] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1221.185810] env[68674]: DEBUG nova.virt.hardware [None 
req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1221.186008] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1221.186209] env[68674]: DEBUG nova.virt.hardware [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1221.187135] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52791026-18ab-4f4f-b24d-28ca072ebd57 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.195625] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e5b511-2512-4203-8ea0-5cc758c460de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.273323] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241075, 'name': Rename_Task, 'duration_secs': 0.138316} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.274771] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1221.275811] env[68674]: DEBUG nova.compute.manager [req-41a841db-d53e-4171-9ce4-08fb3625197e req-2097a3c1-e9a5-4dd8-a2e0-2d42d88cf3af service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Received event network-vif-plugged-fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1221.276024] env[68674]: DEBUG oslo_concurrency.lockutils [req-41a841db-d53e-4171-9ce4-08fb3625197e req-2097a3c1-e9a5-4dd8-a2e0-2d42d88cf3af service nova] Acquiring lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.276232] env[68674]: DEBUG oslo_concurrency.lockutils [req-41a841db-d53e-4171-9ce4-08fb3625197e req-2097a3c1-e9a5-4dd8-a2e0-2d42d88cf3af service nova] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.276451] env[68674]: DEBUG oslo_concurrency.lockutils [req-41a841db-d53e-4171-9ce4-08fb3625197e req-2097a3c1-e9a5-4dd8-a2e0-2d42d88cf3af service nova] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.276621] env[68674]: DEBUG nova.compute.manager [req-41a841db-d53e-4171-9ce4-08fb3625197e req-2097a3c1-e9a5-4dd8-a2e0-2d42d88cf3af service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] No waiting events found dispatching network-vif-plugged-fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1221.276841] env[68674]: WARNING nova.compute.manager [req-41a841db-d53e-4171-9ce4-08fb3625197e req-2097a3c1-e9a5-4dd8-a2e0-2d42d88cf3af service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Received unexpected event network-vif-plugged-fe2b8729-9ef5-4a98-b38f-405833365c1c for instance with vm_state building and task_state spawning. [ 1221.277078] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acc7fcb5-5564-4d98-8425-f4d31c2b0c14 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.284676] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1221.284676] env[68674]: value = "task-3241076" [ 1221.284676] env[68674]: _type = "Task" [ 1221.284676] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.292976] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241076, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.394329] env[68674]: DEBUG nova.network.neutron [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Successfully updated port: fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.794828] env[68674]: DEBUG oslo_vmware.api [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241076, 'name': PowerOnVM_Task, 'duration_secs': 0.471583} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.795142] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1221.795378] env[68674]: INFO nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Took 7.36 seconds to spawn the instance on the hypervisor. 
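Around the PowerOnVM_Task above, oslo.vmware reports progress 0% and then completion with a duration_secs value. The generic poll-until-terminal loop below is a self-contained sketch of that behaviour only; poll_fn is a hypothetical callable standing in for a vCenter task-info read, and the real wait_for_task in oslo.vmware (as the file paths in the log suggest) drives its polling through a looping call and raises library exceptions on task errors rather than the plain RuntimeError used here.

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Block until poll_fn() -> (state, progress, result) reports a terminal state."""
    start = time.monotonic()
    while time.monotonic() - start < timeout:
        state, progress, result = poll_fn()
        print("progress is %d%%" % progress)
        if state == "success":
            print("completed successfully in %.3fs" % (time.monotonic() - start))
            return result
        if state == "error":
            raise RuntimeError("task failed: %r" % (result,))
        time.sleep(interval)
    raise TimeoutError("task still running after %ss" % timeout)

# Tiny fake task that succeeds on the second poll.
_polls = iter([("running", 0, None), ("success", 100, "powered-on")])
print(wait_for_task(lambda: next(_polls), interval=0.01))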
[ 1221.795584] env[68674]: DEBUG nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1221.796824] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8744bcb4-966e-4dd1-815b-8bfaee8d617e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.901869] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.901869] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.901869] env[68674]: DEBUG nova.network.neutron [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1222.315874] env[68674]: INFO nova.compute.manager [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Took 12.10 seconds to build instance. [ 1222.432172] env[68674]: DEBUG nova.network.neutron [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Instance cache missing network info. 
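The lock messages above ("Acquiring lock refresh_cache-2f698e5c-...", "Acquired lock", and later "Releasing lock") are the oslo.concurrency pattern for serializing rebuilds of an instance's network info cache. A minimal sketch is below, assuming oslo.concurrency is installed; the lock-name format and the refresh_from_neutron helper are illustrative placeholders, not Nova's actual code.

from oslo_concurrency import lockutils

def refresh_from_neutron(instance_uuid):
    # Hypothetical stand-in for the Neutron round-trip that rebuilds the cache.
    return {"instance": instance_uuid, "ports": []}

def get_instance_nw_info(instance_uuid):
    # One worker rebuilds the cache at a time; others block on the same lock name.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        return refresh_from_neutron(instance_uuid)

print(get_instance_nw_info("2f698e5c-6be5-4747-b006-6ed6dd512f79"))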
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1222.567680] env[68674]: DEBUG nova.network.neutron [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [{"id": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "address": "fa:16:3e:d2:2a:2b", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2b8729-9e", "ovs_interfaceid": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.729845] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "531f935e-27e8-4b0d-b549-9693be7bff93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.818455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-f16b410b-b81b-477e-b7ed-6537711fe020 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "531f935e-27e8-4b0d-b549-9693be7bff93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.606s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.818455] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "531f935e-27e8-4b0d-b549-9693be7bff93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.089s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.818644] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "531f935e-27e8-4b0d-b549-9693be7bff93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.818771] env[68674]: DEBUG 
oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "531f935e-27e8-4b0d-b549-9693be7bff93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.818936] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "531f935e-27e8-4b0d-b549-9693be7bff93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.821047] env[68674]: INFO nova.compute.manager [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Terminating instance [ 1223.070671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.071136] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Instance network_info: |[{"id": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "address": "fa:16:3e:d2:2a:2b", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2b8729-9e", "ovs_interfaceid": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1223.071650] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:2a:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'fe2b8729-9ef5-4a98-b38f-405833365c1c', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1223.079434] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.079710] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1223.079969] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92af6da4-d7ad-4e4d-97fc-080759e7e11f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.100684] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1223.100684] env[68674]: value = "task-3241077" [ 1223.100684] env[68674]: _type = "Task" [ 1223.100684] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.108325] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241077, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.301389] env[68674]: DEBUG nova.compute.manager [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Received event network-changed-fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1223.301815] env[68674]: DEBUG nova.compute.manager [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Refreshing instance network info cache due to event network-changed-fe2b8729-9ef5-4a98-b38f-405833365c1c. 
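The "Instance VIF info" above flattens the cached Neutron port into the handful of fields the vmwareapi layer needs: an NSX OpaqueNetwork reference plus MAC address, iface id and vif model. The sketch below is a hedged illustration of that mapping; the helper name and the assumption that every port is backed by an nsx.LogicalSwitch with a vmxnet3 model are taken from this particular log entry, not a general rule.

def vif_to_vmware_info(vif):
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }

vif = {  # trimmed copy of the cached port shown above
    "id": "fe2b8729-9ef5-4a98-b38f-405833365c1c",
    "address": "fa:16:3e:d2:2a:2b",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e"},
}
print(vif_to_vmware_info(vif))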
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1223.302101] env[68674]: DEBUG oslo_concurrency.lockutils [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] Acquiring lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.302358] env[68674]: DEBUG oslo_concurrency.lockutils [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] Acquired lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.302574] env[68674]: DEBUG nova.network.neutron [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Refreshing network info cache for port fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1223.324552] env[68674]: DEBUG nova.compute.manager [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1223.324868] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1223.325854] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a448bfe-adfe-4c90-8e0f-098c6f68efc2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.334649] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1223.334929] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26854a76-e557-4450-a746-c05786e08fe4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.342182] env[68674]: DEBUG oslo_vmware.api [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1223.342182] env[68674]: value = "task-3241078" [ 1223.342182] env[68674]: _type = "Task" [ 1223.342182] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.350107] env[68674]: DEBUG oslo_vmware.api [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241078, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.610644] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241077, 'name': CreateVM_Task, 'duration_secs': 0.355794} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.610845] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1223.611529] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.611815] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.612092] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1223.612363] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9475f544-4bc9-48bc-bff4-f2a834e802c7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.616925] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1223.616925] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52264027-b053-4ccd-5d67-025cc39a6a7a" [ 1223.616925] env[68674]: _type = "Task" [ 1223.616925] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.625392] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52264027-b053-4ccd-5d67-025cc39a6a7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.851712] env[68674]: DEBUG oslo_vmware.api [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241078, 'name': PowerOffVM_Task, 'duration_secs': 0.173911} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.851968] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1223.852150] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1223.852393] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3c5386a-c5bc-441e-887b-69198f38ce4e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.916265] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1223.916661] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1223.916897] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Deleting the datastore file [datastore1] 531f935e-27e8-4b0d-b549-9693be7bff93 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1223.918065] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-025aa403-74d4-42ab-8540-58c1f00efd55 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.923701] env[68674]: DEBUG oslo_vmware.api [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for the task: (returnval){ [ 1223.923701] env[68674]: value = "task-3241080" [ 1223.923701] env[68674]: _type = "Task" [ 1223.923701] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.931712] env[68674]: DEBUG oslo_vmware.api [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
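The lines around here trace the destroy path for instance 531f935e-...: power off the VM, unregister it from vCenter, then delete its datastore directory before the network is deallocated further down. The ordering sketch below is purely illustrative; every helper is a trivial stub standing in for the corresponding vCenter task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), not the driver's real API.

def power_off_vm(vm):
    print("powered off %s" % vm)

def unregister_vm(vm):
    print("unregistered %s" % vm)

def delete_datastore_dir(datastore, vm):
    print("deleted [%s] %s" % (datastore, vm))

def destroy_instance(vm, datastore="datastore1"):
    # Same order as the log: stop the guest, drop it from the vCenter
    # inventory, then remove its files so nothing references them any more.
    power_off_vm(vm)
    unregister_vm(vm)
    delete_datastore_dir(datastore, vm)

destroy_instance("531f935e-27e8-4b0d-b549-9693be7bff93")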
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.996069] env[68674]: DEBUG nova.network.neutron [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updated VIF entry in instance network info cache for port fe2b8729-9ef5-4a98-b38f-405833365c1c. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1223.996510] env[68674]: DEBUG nova.network.neutron [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [{"id": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "address": "fa:16:3e:d2:2a:2b", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2b8729-9e", "ovs_interfaceid": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.025820] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.026159] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.128897] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52264027-b053-4ccd-5d67-025cc39a6a7a, 'name': SearchDatastore_Task, 'duration_secs': 0.010941} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.129219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.129463] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1224.129700] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.130145] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.130145] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1224.130304] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c85248a7-4ca6-40ca-818f-eb7a0e2bfe06 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.138779] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1224.138962] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Folder [datastore2] devstack-image-cache_base created. 
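The entries above and below sketch the image-cache dance for image b84d9354-...: take a lock on the cache path, check the datastore for the cached VMDK (SearchDatastore_Task), create the devstack-image-cache_base folder if it is missing, and then copy the cached disk into the new instance's directory (the CopyVirtualDisk_Task that follows). The sketch below mirrors that flow under loud assumptions: the datastore helpers are trivial in-memory stubs rather than the driver's real SearchDatastore/CopyVirtualDisk calls, and only the locking shape matches the log.

from oslo_concurrency import lockutils

_datastore = set()           # stand-in for what SearchDatastore_Task would find

def datastore_exists(path):
    return path in _datastore

def fetch_image_to(path):
    _datastore.add(path)     # the real driver streams the image from Glance

def copy_virtual_disk(src, dst):
    _datastore.add(dst)      # the real driver runs CopyVirtualDisk_Task

def ensure_cached_image(image_id, cache_dir="[datastore2] devstack-image-cache_base"):
    cached = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
    with lockutils.lock(cached):          # same serialization as the cache lock above
        if not datastore_exists(cached):
            fetch_image_to(cached)
    return cached

def build_root_disk(image_id, instance_uuid):
    src = ensure_cached_image(image_id)
    dst = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)
    copy_virtual_disk(src, dst)
    return dst

print(build_root_disk("b84d9354-ef6b-46ca-9dae-6549fa89bbea",
                      "2f698e5c-6be5-4747-b006-6ed6dd512f79"))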
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1224.139648] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7313b2bf-16bd-467d-89b0-abf737d7ac2e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.145183] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1224.145183] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e626a-c8c3-adcb-eea9-ebe90f377b0b" [ 1224.145183] env[68674]: _type = "Task" [ 1224.145183] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.152456] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e626a-c8c3-adcb-eea9-ebe90f377b0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.432995] env[68674]: DEBUG oslo_vmware.api [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Task: {'id': task-3241080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145214} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.433373] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1224.433495] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1224.433667] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1224.433847] env[68674]: INFO nova.compute.manager [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1224.434113] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
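The oslo.service entries above wrap _deallocate_network_with_retries in a looping call. The canonical public pattern for such calls is to run a function at a fixed interval until it raises LoopingCallDone; the sketch below shows that pattern, assuming oslo.service (eventlet backend, as the paths in the log suggest) is installed. Whether Nova uses a fixed-interval or backoff variant here is not visible from this excerpt, and the retrying deallocation body is a hypothetical stand-in.

from oslo_service import loopingcall

attempts = {"count": 0}

def _deallocate_with_retries():
    # Hypothetical body: the first attempt hits a transient failure, the
    # second succeeds, and raising LoopingCallDone stops the looping call.
    attempts["count"] += 1
    if attempts["count"] < 2:
        print("deallocation attempt %d failed, retrying" % attempts["count"])
        return
    raise loopingcall.LoopingCallDone(retvalue="deallocated")

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()
print(result)    # -> deallocated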
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1224.434319] env[68674]: DEBUG nova.compute.manager [-] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1224.434409] env[68674]: DEBUG nova.network.neutron [-] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1224.499823] env[68674]: DEBUG oslo_concurrency.lockutils [req-b257b7be-44b3-4cc8-83d8-188ce4526c8c req-c646ae58-56cc-4b18-80d4-cc69ea33a7d0 service nova] Releasing lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.529095] env[68674]: DEBUG nova.compute.utils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1224.656360] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]520e626a-c8c3-adcb-eea9-ebe90f377b0b, 'name': SearchDatastore_Task, 'duration_secs': 0.010042} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.657260] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc225a73-4f5a-46bf-8390-c1ff5630802a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.662635] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1224.662635] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5254401a-5428-c6a1-481e-16a6c7f42523" [ 1224.662635] env[68674]: _type = "Task" [ 1224.662635] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.671107] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5254401a-5428-c6a1-481e-16a6c7f42523, 'name': SearchDatastore_Task} progress is 0%. 
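The compute.utils line above ("Using /dev/sd instead of None") comes from picking the next free block-device name for the volume that is about to be attached (it ends up at /dev/sdb further down). A simplified, hypothetical version of that selection is sketched below; Nova's real get_next_device_name also copes with other prefixes such as /dev/vd and /dev/xvd and with flavor ephemeral and swap devices.

import string

def next_device_name(used, prefix="/dev/sd"):
    """Return the first unused /dev/sdX name, given names already in use."""
    used_letters = {name[len(prefix):] for name in used if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used_letters:
            return prefix + letter
    raise ValueError("no free device names under %s*" % prefix)

print(next_device_name(["/dev/sda"]))   # -> /dev/sdb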
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.031722] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.173116] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5254401a-5428-c6a1-481e-16a6c7f42523, 'name': SearchDatastore_Task, 'duration_secs': 0.00958} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.173390] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.173649] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79/2f698e5c-6be5-4747-b006-6ed6dd512f79.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1225.173903] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9662f052-dcc0-473d-bc82-c558a95d243d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.179778] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1225.179778] env[68674]: value = "task-3241081" [ 1225.179778] env[68674]: _type = "Task" [ 1225.179778] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.186638] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.203107] env[68674]: DEBUG nova.network.neutron [-] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.350728] env[68674]: DEBUG nova.compute.manager [req-c1bc133f-6098-42c1-a272-f0d103db0a21 req-c73b30a6-0299-4e3a-b1b1-a0d06a258579 service nova] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Received event network-vif-deleted-f2acd622-62cd-45c3-b22b-33d482cc9598 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1225.690643] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241081, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.706255] env[68674]: INFO nova.compute.manager [-] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Took 1.27 seconds to deallocate network for instance. [ 1225.918629] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.918922] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.919159] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.919765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.919954] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.921937] env[68674]: INFO 
nova.compute.manager [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Terminating instance [ 1226.103197] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.103452] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.103692] env[68674]: INFO nova.compute.manager [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Attaching volume 4e2f6062-a545-44e7-8eff-1758bf08c9e3 to /dev/sdb [ 1226.137901] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f15cad-31d5-4781-9330-13eef0b62572 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.144568] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3482a0f8-5d35-4326-9f24-0960f59721de {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.157549] env[68674]: DEBUG nova.virt.block_device [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating existing volume attachment record: 591f094c-305d-4eed-81a5-b79d877e42a6 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1226.189789] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51309} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.189789] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79/2f698e5c-6be5-4747-b006-6ed6dd512f79.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1226.189789] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1226.190053] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c89d9b8-62bd-42a9-a6cc-bf43a26d02c6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.196026] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1226.196026] env[68674]: value = "task-3241082" [ 1226.196026] env[68674]: _type = "Task" [ 1226.196026] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.203099] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241082, 'name': ExtendVirtualDisk_Task} progress is 0%. 
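The "Extending root virtual disk to 1048576" message above is the copied image being grown to the flavor's root size, expressed in KB (1 GB here). The snippet below only makes that unit conversion explicit; the ExtendVirtualDisk_Task call itself is not reproduced.

def root_gb_to_kb(root_gb):
    # The extend task takes the new capacity in KB: GB * 1024 * 1024.
    return root_gb * 1024 * 1024

print(root_gb_to_kb(1))   # -> 1048576, matching the log line above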
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.212166] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.212460] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.212692] env[68674]: DEBUG nova.objects.instance [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lazy-loading 'resources' on Instance uuid 531f935e-27e8-4b0d-b549-9693be7bff93 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.426040] env[68674]: DEBUG nova.compute.manager [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1226.426232] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.427283] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4a19d5-5e73-4ff5-b15a-25376634f691 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.435080] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.435355] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-788cdeb3-5ce4-4154-8abf-e446d548501b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.441675] env[68674]: DEBUG oslo_vmware.api [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1226.441675] env[68674]: value = "task-3241084" [ 1226.441675] env[68674]: _type = "Task" [ 1226.441675] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.449832] env[68674]: DEBUG oslo_vmware.api [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241084, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.705274] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067628} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.705622] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1226.706364] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f6c6fd-2f57-40dc-8350-878f114c2976 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.729870] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79/2f698e5c-6be5-4747-b006-6ed6dd512f79.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1226.730316] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d302846-7e3b-420f-89d1-8f4df55ce91b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.753622] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1226.753622] env[68674]: value = "task-3241085" [ 1226.753622] env[68674]: _type = "Task" [ 1226.753622] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.761579] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241085, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.812449] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8da243-3363-45a1-ba5d-98e0439c72f6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.819231] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f76b641-8023-4746-83f4-54b2f4d42072 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.849164] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779b8cfc-eb89-4f70-addd-c526904e417b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.855751] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533b937c-7809-46bd-8250-561b476f8e5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.868240] env[68674]: DEBUG nova.compute.provider_tree [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.950996] env[68674]: DEBUG oslo_vmware.api [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241084, 'name': PowerOffVM_Task, 'duration_secs': 0.180983} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.951276] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1226.951399] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1226.951641] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-456165bb-02ce-4db4-85bb-96d8164c6be6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.040518] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1227.040741] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1227.040875] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleting the datastore file [datastore1] c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.041168] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0331937-0668-4f62-b010-29ece303e940 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.048404] env[68674]: DEBUG oslo_vmware.api [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1227.048404] env[68674]: value = "task-3241087" [ 1227.048404] env[68674]: _type = "Task" [ 1227.048404] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.056035] env[68674]: DEBUG oslo_vmware.api [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241087, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.263262] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241085, 'name': ReconfigVM_Task, 'duration_secs': 0.305122} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.263546] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79/2f698e5c-6be5-4747-b006-6ed6dd512f79.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1227.264175] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2506058-3892-4393-b9e0-2935c02620cc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.270401] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1227.270401] env[68674]: value = "task-3241088" [ 1227.270401] env[68674]: _type = "Task" [ 1227.270401] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.277173] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241088, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.371456] env[68674]: DEBUG nova.scheduler.client.report [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1227.558863] env[68674]: DEBUG oslo_vmware.api [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174232} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.559105] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.559294] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1227.559472] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1227.559651] env[68674]: INFO nova.compute.manager [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1227.559886] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.560085] env[68674]: DEBUG nova.compute.manager [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1227.560183] env[68674]: DEBUG nova.network.neutron [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1227.781862] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241088, 'name': Rename_Task, 'duration_secs': 0.14339} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.782291] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1227.782386] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cde84b83-d642-4b2c-9413-97e504be9bcf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.788388] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1227.788388] env[68674]: value = "task-3241089" [ 1227.788388] env[68674]: _type = "Task" [ 1227.788388] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.796572] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241089, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.876834] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.904765] env[68674]: INFO nova.scheduler.client.report [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Deleted allocations for instance 531f935e-27e8-4b0d-b549-9693be7bff93 [ 1227.996110] env[68674]: DEBUG nova.compute.manager [req-b22818e8-6bc8-40b9-bd9e-8b5f36088357 req-7fb07bba-0ab0-416b-b324-5f4b62a58447 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Received event network-vif-deleted-47cc2f82-8285-4168-b696-407ade0efaaf {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.996450] env[68674]: INFO nova.compute.manager [req-b22818e8-6bc8-40b9-bd9e-8b5f36088357 req-7fb07bba-0ab0-416b-b324-5f4b62a58447 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Neutron deleted interface 47cc2f82-8285-4168-b696-407ade0efaaf; detaching it from the instance and deleting it from the info cache [ 1227.996555] env[68674]: DEBUG nova.network.neutron [req-b22818e8-6bc8-40b9-bd9e-8b5f36088357 req-7fb07bba-0ab0-416b-b324-5f4b62a58447 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.299536] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241089, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.412897] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ad9ad74a-05c7-4ba8-9498-0849b180aa10 tempest-ServerPasswordTestJSON-386322776 tempest-ServerPasswordTestJSON-386322776-project-member] Lock "531f935e-27e8-4b0d-b549-9693be7bff93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.594s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.476605] env[68674]: DEBUG nova.network.neutron [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.499948] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70bc36aa-35bf-400d-a780-f6e9659fdbc8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.509463] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7023c641-98b1-457e-bd3c-6daa8b6348d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.537531] env[68674]: DEBUG nova.compute.manager [req-b22818e8-6bc8-40b9-bd9e-8b5f36088357 req-7fb07bba-0ab0-416b-b324-5f4b62a58447 service nova] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Detach interface failed, port_id=47cc2f82-8285-4168-b696-407ade0efaaf, reason: Instance c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1228.798739] env[68674]: DEBUG oslo_vmware.api [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241089, 'name': PowerOnVM_Task, 'duration_secs': 0.599374} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.799051] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1228.799228] env[68674]: INFO nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Took 7.65 seconds to spawn the instance on the hypervisor. 
[ 1228.799411] env[68674]: DEBUG nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1228.800195] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c7a19a-4c66-4f52-a22c-867710b56416 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.984573] env[68674]: INFO nova.compute.manager [-] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Took 1.42 seconds to deallocate network for instance. [ 1229.156017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.156017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.318995] env[68674]: INFO nova.compute.manager [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Took 12.38 seconds to build instance. 
[ 1229.489641] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.489940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.490181] env[68674]: DEBUG nova.objects.instance [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'resources' on Instance uuid c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.659541] env[68674]: DEBUG nova.compute.utils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1229.821314] env[68674]: DEBUG oslo_concurrency.lockutils [None req-38e01f94-7c25-40f2-85d5-7364a336e004 tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.891s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.104338] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3c945e-becc-4918-87e1-93e9526bb8ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.112105] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0304ab-4858-481a-a5c0-00b87ac9bf7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.143159] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226f502e-5319-4707-bcc7-7e00f97fed5e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.150838] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcd64be-d5ae-4935-8a69-6e27f87d6f81 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.163983] env[68674]: DEBUG oslo_concurrency.lockutils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.165021] env[68674]: DEBUG 
nova.compute.provider_tree [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.669530] env[68674]: DEBUG nova.scheduler.client.report [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.702035] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1230.702284] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647724', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'name': 'volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'serial': '4e2f6062-a545-44e7-8eff-1758bf08c9e3'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1230.703463] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965d931c-d2d8-496c-a9cb-59f7315860c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.720701] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f078d21c-e515-438b-9f4f-474ec3e71803 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.745030] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3/volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.746258] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-9980eb69-4ab6-462d-8ba9-1aa125dcc309 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.767076] env[68674]: DEBUG oslo_vmware.api [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1230.767076] env[68674]: value = "task-3241091" [ 1230.767076] env[68674]: _type = "Task" [ 1230.767076] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.775839] env[68674]: DEBUG oslo_vmware.api [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241091, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.157955] env[68674]: DEBUG nova.compute.manager [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1231.176143] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.686s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.199280] env[68674]: INFO nova.scheduler.client.report [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted allocations for instance c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f [ 1231.262434] env[68674]: DEBUG oslo_concurrency.lockutils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.262703] env[68674]: DEBUG oslo_concurrency.lockutils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.262944] env[68674]: INFO nova.compute.manager [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Attaching volume 495cdb58-b7e0-4757-a9d1-af1aab8ca197 to /dev/sdb [ 1231.279389] env[68674]: DEBUG oslo_vmware.api [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241091, 'name': ReconfigVM_Task, 'duration_secs': 0.511988} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.279676] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3/volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.284677] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b01947c5-00a2-49b2-ba8c-9d71dd0546dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.296580] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001d30c3-bc76-4bca-a768-27adaee3c641 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.304740] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00abd38a-9e55-48da-a19a-544b1dfdec03 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.307198] env[68674]: DEBUG oslo_vmware.api [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1231.307198] env[68674]: value = "task-3241092" [ 1231.307198] env[68674]: _type = "Task" [ 1231.307198] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.316383] env[68674]: DEBUG oslo_vmware.api [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241092, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.319732] env[68674]: DEBUG nova.virt.block_device [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updating existing volume attachment record: 135b1a2e-7b62-47c3-a7e6-1cd9b0b0afd7 {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1231.679143] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.679439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.706934] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f8925c0-ce97-44d5-bddf-e31685623697 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.788s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.816543] env[68674]: DEBUG oslo_vmware.api [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241092, 'name': ReconfigVM_Task, 'duration_secs': 0.224628} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.816882] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647724', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'name': 'volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'serial': '4e2f6062-a545-44e7-8eff-1758bf08c9e3'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1232.184388] env[68674]: INFO nova.compute.claims [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1232.408250] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.408524] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.690617] env[68674]: INFO nova.compute.resource_tracker [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating resource usage from migration 0a5b3e05-4b5a-459f-85f3-ddd65c04f099 [ 1232.755117] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6db258b-cf74-44af-98db-6b750d1e165a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.762857] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74112f51-0e4f-4ea6-811d-576558a1133a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.792261] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba06c3e-ef6f-449d-bcdd-b758270d1e96 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.799128] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822aefbd-6b38-4a03-8d65-7d9be273a70c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.813756] env[68674]: DEBUG nova.compute.provider_tree [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in 
ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.852968] env[68674]: DEBUG nova.objects.instance [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid f4751bd8-e0df-4686-a22f-e51a4a98b8d6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.913841] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.913958] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.914130] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.914268] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.914766] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.914766] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.914766] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1232.915152] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.317036] env[68674]: DEBUG nova.scheduler.client.report [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.357421] env[68674]: DEBUG oslo_concurrency.lockutils [None req-fc70e28e-7bb8-480d-943a-b926365cf171 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.254s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.418612] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.687751] env[68674]: DEBUG nova.compute.manager [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1233.823520] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.144s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.823751] env[68674]: INFO nova.compute.manager [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Migrating [ 1233.830055] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.411s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.830253] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s 
{{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.830409] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1233.833848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5a005e-0ca1-46a4-ba22-8b55a8b4f24e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.846140] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04965477-b413-42a2-ad28-850ca28209a4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.861572] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d5d344-d402-4db1-bb5c-c246b9e75223 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.868229] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ef1d26-b1d1-40a0-9c9e-a1c9593fdf30 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.898438] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179788MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1233.898550] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.898730] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.173155] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.173406] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.205691] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.341769] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.342222] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.342222] env[68674]: DEBUG nova.network.neutron [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.677034] env[68674]: DEBUG nova.compute.utils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1234.907142] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Applying migration context for instance 2f698e5c-6be5-4747-b006-6ed6dd512f79 as it has an incoming, in-progress migration 0a5b3e05-4b5a-459f-85f3-ddd65c04f099. Migration status is pre-migrating {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1234.907944] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating resource usage from migration 0a5b3e05-4b5a-459f-85f3-ddd65c04f099 [ 1234.908274] env[68674]: INFO nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating resource usage from migration 6ce6bc59-5dc9-4a62-87aa-05653c67fb99 [ 1234.925853] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1234.926086] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance f4751bd8-e0df-4686-a22f-e51a4a98b8d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1234.926247] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration 0a5b3e05-4b5a-459f-85f3-ddd65c04f099 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1234.926370] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 2f698e5c-6be5-4747-b006-6ed6dd512f79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1234.926520] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Migration 6ce6bc59-5dc9-4a62-87aa-05653c67fb99 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1234.926675] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1234.926858] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1234.926994] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1235.000379] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f82544-534a-4505-9240-78e0151c78f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.009822] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ad4144-db49-4f55-9493-e8234f07b4c9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.040216] env[68674]: DEBUG nova.network.neutron [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [{"id": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "address": "fa:16:3e:d2:2a:2b", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2b8729-9e", "ovs_interfaceid": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.041798] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7e8b10-de68-4cee-9f16-4c2b8628ca5d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.048732] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c27fd18-6207-46c3-bfef-aed1bdae6d1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.062899] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.179771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.545080] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.565922] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1235.867039] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] 
Volume attach. Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1235.867039] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647725', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'name': 'volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b276f5a-9e53-4ef9-892b-4e4bd0dc09df', 'attached_at': '', 'detached_at': '', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'serial': '495cdb58-b7e0-4757-a9d1-af1aab8ca197'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1235.868038] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3a146a-f924-4e71-b4ca-54004e2f44b4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.884294] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ad275b-2d44-4c9a-a99f-363f9ca892ab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.909891] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197/volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.910200] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b12ec57-2e59-473d-a83f-b5178e4c8a83 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.928704] env[68674]: DEBUG oslo_vmware.api [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1235.928704] env[68674]: value = "task-3241095" [ 1235.928704] env[68674]: _type = "Task" [ 1235.928704] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.936829] env[68674]: DEBUG oslo_vmware.api [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241095, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.070163] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1236.070370] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.172s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.070595] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.865s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.238954] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.239320] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.239611] env[68674]: INFO nova.compute.manager [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Attaching volume e293d604-9044-4db3-88ad-4187491aa18a to /dev/sdc [ 1236.269736] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6041ae-bf63-420c-9a57-bf281e8f6511 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.277258] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a7cf39-3cf3-4f8a-8bab-932ae3ee70b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.290300] env[68674]: DEBUG nova.virt.block_device [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating existing volume attachment record: 9f9d9c13-28fe-4dcf-8a57-652c5545dd5c {{(pid=68674) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1236.438501] env[68674]: DEBUG oslo_vmware.api [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241095, 
'name': ReconfigVM_Task, 'duration_secs': 0.343386} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.438782] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Reconfigured VM instance instance-00000079 to attach disk [datastore2] volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197/volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1236.443362] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f25661c-11fc-4b67-a62e-3ed3daa3c2af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.458682] env[68674]: DEBUG oslo_vmware.api [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1236.458682] env[68674]: value = "task-3241097" [ 1236.458682] env[68674]: _type = "Task" [ 1236.458682] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.466863] env[68674]: DEBUG oslo_vmware.api [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241097, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.575476] env[68674]: INFO nova.compute.claims [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.968913] env[68674]: DEBUG oslo_vmware.api [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241097, 'name': ReconfigVM_Task, 'duration_secs': 0.14968} completed successfully. 
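The "Reconfiguring VM instance ... to attach disk [datastore2] volume-.../...vmdk ... with type thin" step at [ 1235.909891], completed at [ 1236.438501], corresponds to a ReconfigVM_Task whose config spec adds one disk device backed by the volume's VMDK. The sketch below models such a spec as plain dictionaries; the field names loosely follow the vSphere object model (device change, virtual disk, file backing) and are an illustration, not the driver's code.

    def make_attach_disk_spec(vmdk_path, controller_key, unit_number, disk_type='thin'):
        """Dict-shaped illustration of the spec sent with ReconfigVM_Task to
        hot-add a volume-backed disk to a running VM."""
        disk = {
            'key': -100,                         # negative key marks a new device
            'controllerKey': controller_key,
            'unitNumber': unit_number,
            'backing': {
                'fileName': vmdk_path,
                'diskMode': 'persistent',
                'thinProvisioned': disk_type == 'thin',
            },
        }
        return {'deviceChange': [{'operation': 'add', 'device': disk}]}

    spec = make_attach_disk_spec(
        '[datastore2] volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197/'
        'volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197.vmdk',
        controller_key=1000, unit_number=1)
    print(spec['deviceChange'][0]['device']['backing']['fileName'])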
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.969306] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647725', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'name': 'volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b276f5a-9e53-4ef9-892b-4e4bd0dc09df', 'attached_at': '', 'detached_at': '', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'serial': '495cdb58-b7e0-4757-a9d1-af1aab8ca197'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1237.060071] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27795750-e12d-4448-a559-611543f8ca86 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.077179] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1237.081841] env[68674]: INFO nova.compute.resource_tracker [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating resource usage from migration 6ce6bc59-5dc9-4a62-87aa-05653c67fb99 [ 1237.158192] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0c16b7-15fc-4dbe-801b-b9fe65047fa5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.165311] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e436bc3-122e-4020-b042-9f25f22b292f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.194272] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b92dbc-9d0a-4028-8704-515c5d989548 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.201010] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaa8dd8-a55a-4d96-a02b-c406fa9a06e1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.213424] env[68674]: DEBUG nova.compute.provider_tree [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.582331] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1237.582719] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e610f157-7ee4-4d57-b373-7fd5c1ad2e3b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.589778] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1237.589778] env[68674]: value = "task-3241098" [ 1237.589778] env[68674]: _type = "Task" [ 1237.589778] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.598634] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.716550] env[68674]: DEBUG nova.scheduler.client.report [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.005126] env[68674]: DEBUG nova.objects.instance [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'flavor' on Instance uuid 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1238.099496] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241098, 'name': PowerOffVM_Task, 'duration_secs': 0.219478} completed successfully. 
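The inventory reported at [ 1237.716550] gives, for each resource class, a total, a reserved amount and an allocation ratio. Placement derives schedulable capacity as (total - reserved) * allocation_ratio, so the figures logged there work out as in the small sketch below; the helper name is ours, the formula is the standard placement capacity calculation.

    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        """Schedulable capacity: (total - reserved) * allocation_ratio."""
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in INVENTORY.items():
        print(f"{rc}: {capacity(inv):.0f}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400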
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.099752] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1238.101027] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1238.221429] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.151s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.221707] env[68674]: INFO nova.compute.manager [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Migrating [ 1238.510661] env[68674]: DEBUG oslo_concurrency.lockutils [None req-323dde28-f4de-4c9c-94bf-7683699329e0 tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.606405] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1238.606878] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.606878] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1238.607081] env[68674]: DEBUG 
nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.607284] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1238.607469] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1238.607700] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1238.607895] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1238.608101] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1238.608330] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1238.608557] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1238.613968] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdd02d1e-1dd6-407a-a0b8-d4d20604519c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.631644] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1238.631644] env[68674]: value = "task-3241100" [ 1238.631644] env[68674]: _type = "Task" [ 1238.631644] env[68674]: } to complete. 
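The nova.virt.hardware records from [ 1238.606405] onward walk through topology selection for a 1-vCPU flavor with no explicit limits: preferences default to 0:0:0, limits to 65536 sockets/cores/threads, and the only topology that uses exactly one vCPU is 1:1:1. Below is a deliberately simplified enumeration of that search; the real logic in nova/virt/hardware.py also honours NUMA constraints and image properties.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) combinations that use exactly `vcpus`."""
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)], the single topology logged above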
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.639959] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241100, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.671140] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.671412] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.734841] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.735060] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.735219] env[68674]: DEBUG nova.network.neutron [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1239.142166] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241100, 'name': ReconfigVM_Task, 'duration_secs': 0.160661} completed successfully. 
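The lock messages at [ 1238.671140] and [ 1238.734841] show the oslo.concurrency convention of naming in-process locks after the resource they guard (an instance UUID, a "refresh_cache-..." key), so attach, detach and terminate operations on the same instance serialize. The snippet below is a purely illustrative stand-in for that pattern using only the standard library; real Nova uses oslo_concurrency.lockutils.

    import threading
    from collections import defaultdict
    from contextlib import contextmanager

    # One lock per resource name, mimicking oslo.concurrency's named locks.
    _locks = defaultdict(threading.Lock)

    @contextmanager
    def named_lock(name, caller):
        print(f'Acquiring lock "{name}" by "{caller}"')
        with _locks[name]:
            print(f'Lock "{name}" acquired by "{caller}"')
            yield
        print(f'Lock "{name}" released by "{caller}"')

    with named_lock('1b276f5a-9e53-4ef9-892b-4e4bd0dc09df', 'do_detach_volume'):
        pass  # the detach work runs while the per-instance lock is held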
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.142419] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1239.175040] env[68674]: INFO nova.compute.manager [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Detaching volume 495cdb58-b7e0-4757-a9d1-af1aab8ca197 [ 1239.205497] env[68674]: INFO nova.virt.block_device [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Attempting to driver detach volume 495cdb58-b7e0-4757-a9d1-af1aab8ca197 from mountpoint /dev/sdb [ 1239.205742] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Volume detach. Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1239.205935] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647725', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'name': 'volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b276f5a-9e53-4ef9-892b-4e4bd0dc09df', 'attached_at': '', 'detached_at': '', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'serial': '495cdb58-b7e0-4757-a9d1-af1aab8ca197'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1239.206859] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634edbf1-585f-423b-acb7-196fa690f8d4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.228919] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c28433-e119-445d-86b3-32119f36e507 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.236433] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67264ebb-e853-4f3e-88c0-6ae6ddd3ed79 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.260870] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccba7777-3546-443c-b3ed-19b261b3199e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.275426] env[68674]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] The volume has not been displaced from its original location: [datastore2] volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197/volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197.vmdk. No consolidation needed. {{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1239.280566] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1239.282886] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4eea320-c6fb-494a-8612-6ed929557f42 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.300473] env[68674]: DEBUG oslo_vmware.api [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1239.300473] env[68674]: value = "task-3241101" [ 1239.300473] env[68674]: _type = "Task" [ 1239.300473] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.308011] env[68674]: DEBUG oslo_vmware.api [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241101, 'name': ReconfigVM_Task} progress is 5%. 
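Before detaching, the driver checks whether the disk's current backing file is still the volume's own VMDK; at [ 1239.275426] they match ("The volume has not been displaced from its original location ... No consolidation needed"), so it proceeds straight to the detach reconfigure. A minimal sketch of that decision, with a function name of our own choosing:

    def needs_consolidation(current_backing_path, original_volume_path):
        """True when the disk no longer lives in the volume's own VMDK and the
        data would have to be copied back before the disk can be detached."""
        return current_backing_path != original_volume_path

    volume_vmdk = ('[datastore2] volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197/'
                   'volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197.vmdk')
    print(needs_consolidation(volume_vmdk, volume_vmdk))   # False -> no consolidation needed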
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.460941] env[68674]: DEBUG nova.network.neutron [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.648838] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1239.649284] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1239.649284] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1239.649467] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1239.649617] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1239.649768] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1239.649971] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1239.650156] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1239.650404] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1239.650570] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1239.650749] env[68674]: DEBUG nova.virt.hardware [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1239.656306] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1239.656602] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ac0605f-e434-49ec-875b-d740faf0505f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.674956] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1239.674956] env[68674]: value = "task-3241102" [ 1239.674956] env[68674]: _type = "Task" [ 1239.674956] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.682632] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241102, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.810808] env[68674]: DEBUG oslo_vmware.api [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241101, 'name': ReconfigVM_Task, 'duration_secs': 0.193497} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.811086] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1239.815626] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b55c99f1-2266-4b39-9233-c6541833c9d3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.830146] env[68674]: DEBUG oslo_vmware.api [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1239.830146] env[68674]: value = "task-3241103" [ 1239.830146] env[68674]: _type = "Task" [ 1239.830146] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.840257] env[68674]: DEBUG oslo_vmware.api [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241103, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.963939] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.184384] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241102, 'name': ReconfigVM_Task, 'duration_secs': 0.144034} completed successfully. 
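The instance_info_cache update at [ 1239.460941] stores the instance's network_info as a list of VIF dicts with nested network, subnet and IP data. The snippet below abbreviates that entry to the commonly used fields and shows how the fixed and floating addresses are read out of it; the keys match the structure as logged, while the code itself is only an illustration.

    vif = {
        "id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e",
        "address": "fa:16:3e:6c:f7:a2",
        "type": "ovs",
        "devname": "tapfa6ef6fe-e2",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{
                    "address": "192.168.128.13",
                    "floating_ips": [{"address": "10.180.180.236"}],
                }],
            }],
        },
    }

    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    print(vif["devname"], fixed, floating)
    # tapfa6ef6fe-e2 ['192.168.128.13'] ['10.180.180.236']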
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.184670] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1240.185485] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb8b18c-e0dc-4b07-aac3-79c8a1d7f89e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.206995] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79/2f698e5c-6be5-4747-b006-6ed6dd512f79.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.207229] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5e44f74-87a4-426b-99e6-d21f4910c459 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.225089] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1240.225089] env[68674]: value = "task-3241104" [ 1240.225089] env[68674]: _type = "Task" [ 1240.225089] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.232290] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241104, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.339478] env[68674]: DEBUG oslo_vmware.api [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241103, 'name': ReconfigVM_Task, 'duration_secs': 0.137457} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.339781] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647725', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'name': 'volume-495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b276f5a-9e53-4ef9-892b-4e4bd0dc09df', 'attached_at': '', 'detached_at': '', 'volume_id': '495cdb58-b7e0-4757-a9d1-af1aab8ca197', 'serial': '495cdb58-b7e0-4757-a9d1-af1aab8ca197'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1240.734314] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241104, 'name': ReconfigVM_Task, 'duration_secs': 0.256354} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.734645] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79/2f698e5c-6be5-4747-b006-6ed6dd512f79.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.734895] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1240.831790] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Volume attach. 
Driver type: vmdk {{(pid=68674) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1240.832047] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647726', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'name': 'volume-e293d604-9044-4db3-88ad-4187491aa18a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'serial': 'e293d604-9044-4db3-88ad-4187491aa18a'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1240.832902] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba05537-e36d-4a70-bee8-98a1c7732546 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.849618] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9388fa5f-ff56-42c2-a2e5-0e6e14bbaa63 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.876232] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-e293d604-9044-4db3-88ad-4187491aa18a/volume-e293d604-9044-4db3-88ad-4187491aa18a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.876395] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aea98e19-9935-4a3e-9ec6-fbd4d22aff11 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.890032] env[68674]: DEBUG nova.objects.instance [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'flavor' on Instance uuid 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.895991] env[68674]: DEBUG oslo_vmware.api [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1240.895991] env[68674]: value = "task-3241105" [ 1240.895991] env[68674]: _type = "Task" [ 1240.895991] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.904163] env[68674]: DEBUG oslo_vmware.api [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241105, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.242142] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0357bc11-e54a-4839-9426-4316cbf21d1b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.261095] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792ff97a-5b45-4f6d-b5df-52943ac3c197 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.279182] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1241.407904] env[68674]: DEBUG oslo_vmware.api [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241105, 'name': ReconfigVM_Task, 'duration_secs': 0.318796} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.408812] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-e293d604-9044-4db3-88ad-4187491aa18a/volume-e293d604-9044-4db3-88ad-4187491aa18a.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1241.414253] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f12d56d-06c2-4ce2-9c1e-96acf78af021 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.429906] env[68674]: DEBUG oslo_vmware.api [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1241.429906] env[68674]: value = "task-3241106" [ 1241.429906] env[68674]: _type = "Task" [ 1241.429906] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.436605] env[68674]: DEBUG oslo_vmware.api [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241106, 'name': ReconfigVM_Task} progress is 5%. 
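The resize of instance 2f698e5c-... is reported through coarse progress milestones (0, 17, 33, 50, and 67 at [ 1241.279182]) rather than a continuous percentage; the values are consistent with one update per completed step out of six. A small sketch of that bookkeeping, with the step count stated as an assumption:

    TOTAL_STEPS = 6   # assumed number of coarse migration steps

    def progress_after(step, total=TOTAL_STEPS):
        """Percentage reported once `step` of `total` steps have completed."""
        return round(step * 100 / total)

    print([progress_after(s) for s in range(TOTAL_STEPS + 1)])
    # [0, 17, 33, 50, 67, 83, 100], matching the milestones in the log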
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.478270] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7a4c3e-039c-4711-8702-fc36b6349d2d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.498244] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1241.817546] env[68674]: DEBUG nova.network.neutron [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Port fe2b8729-9ef5-4a98-b38f-405833365c1c binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1241.897771] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6fc5921b-f224-4d14-82cf-9e038b65f04a tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.226s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.938439] env[68674]: DEBUG oslo_vmware.api [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241106, 'name': ReconfigVM_Task, 'duration_secs': 0.140081} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.938735] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647726', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'name': 'volume-e293d604-9044-4db3-88ad-4187491aa18a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'serial': 'e293d604-9044-4db3-88ad-4187491aa18a'} {{(pid=68674) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1242.004050] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1242.004356] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a31cf4e-a7ae-4e30-b00b-21b1b89d089c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.011287] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1242.011287] env[68674]: value = "task-3241107" [ 1242.011287] env[68674]: _type = "Task" [ 1242.011287] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.019656] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241107, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.520949] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241107, 'name': PowerOffVM_Task, 'duration_secs': 0.191081} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.521253] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.521439] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1242.837958] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.838318] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.838359] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.917978] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.918245] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.918446] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.918626] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.918792] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.920641] env[68674]: INFO nova.compute.manager [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Terminating instance [ 1242.972744] env[68674]: DEBUG nova.objects.instance [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid f4751bd8-e0df-4686-a22f-e51a4a98b8d6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.029161] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1243.029438] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1243.029597] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1243.029785] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1243.029934] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 
tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1243.030099] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1243.030309] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1243.030469] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1243.030637] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1243.030801] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1243.030973] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1243.036048] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44d12c69-81c7-4c74-9219-d6d219814bdb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.052787] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1243.052787] env[68674]: value = "task-3241108" [ 1243.052787] env[68674]: _type = "Task" [ 1243.052787] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.060960] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241108, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.424410] env[68674]: DEBUG nova.compute.manager [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1243.424684] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.425590] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2f4a49-ee5e-4440-8465-6605040eecfa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.433465] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1243.433720] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb9dc500-71b0-489f-96a4-983e0ce102ef {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.440307] env[68674]: DEBUG oslo_vmware.api [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1243.440307] env[68674]: value = "task-3241109" [ 1243.440307] env[68674]: _type = "Task" [ 1243.440307] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.448943] env[68674]: DEBUG oslo_vmware.api [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.478061] env[68674]: DEBUG oslo_concurrency.lockutils [None req-60059494-b577-42d9-8d5b-9d76dec43b26 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.239s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.564165] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241108, 'name': ReconfigVM_Task, 'duration_secs': 0.17131} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.564548] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1243.772433] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.772783] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.874710] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.875110] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.875110] env[68674]: DEBUG nova.network.neutron [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1243.951959] env[68674]: DEBUG oslo_vmware.api [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241109, 'name': PowerOffVM_Task, 'duration_secs': 0.149959} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.952239] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1243.952407] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1243.952666] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb2b44d9-9089-488a-bb4f-3b318d7cf73a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.022577] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1244.022832] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1244.022976] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleting the datastore file [datastore2] 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.023265] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb01bfd0-b7dd-4793-a0dd-a7e6844907c4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.031050] env[68674]: DEBUG oslo_vmware.api [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for the task: (returnval){ [ 1244.031050] env[68674]: value = "task-3241111" [ 1244.031050] env[68674]: _type = "Task" [ 1244.031050] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.039141] env[68674]: DEBUG oslo_vmware.api [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241111, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.074790] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1244.075039] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1244.075207] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1244.075394] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1244.075544] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1244.075699] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1244.075913] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1244.076094] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1244.076278] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1244.076448] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1244.076656] env[68674]: DEBUG nova.virt.hardware [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1244.082077] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1244.082383] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85bc44c3-3095-4e05-a4bd-a1707c336ed3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.101656] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1244.101656] env[68674]: value = "task-3241112" [ 1244.101656] env[68674]: _type = "Task" [ 1244.101656] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.113141] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241112, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.275911] env[68674]: INFO nova.compute.manager [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Detaching volume 4e2f6062-a545-44e7-8eff-1758bf08c9e3 [ 1244.307578] env[68674]: INFO nova.virt.block_device [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Attempting to driver detach volume 4e2f6062-a545-44e7-8eff-1758bf08c9e3 from mountpoint /dev/sdb [ 1244.307832] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1244.308035] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647724', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'name': 'volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'serial': '4e2f6062-a545-44e7-8eff-1758bf08c9e3'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1244.308944] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5d35a5-0356-4816-a495-93460903e546 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.333051] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2089f5-4f41-4c92-bc69-ef1916c57da1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.339945] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23edeb0d-370e-4dba-9bcf-01732cb63fae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.362444] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcf7185-8a2e-4dac-9668-abd3ff8d4efb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.379586] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] The volume has not been displaced from its original location: [datastore2] volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3/volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1244.385141] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1244.385586] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40a8f482-8d9f-473d-9e57-8641f7cfadac {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.414926] env[68674]: DEBUG oslo_vmware.api [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1244.414926] env[68674]: value = "task-3241113" [ 1244.414926] env[68674]: _type = "Task" [ 1244.414926] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.426687] env[68674]: DEBUG oslo_vmware.api [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241113, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.542468] env[68674]: DEBUG oslo_vmware.api [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Task: {'id': task-3241111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147585} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.542720] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.542902] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1244.543094] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1244.543276] env[68674]: INFO nova.compute.manager [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Took 1.12 seconds to destroy the instance on the hypervisor. 
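[editor's aside] The entries above trace the hypervisor-side teardown of instance 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df: the VM is powered off (PowerOffVM_Task), unregistered from vCenter (UnregisterVM), and its folder on datastore2 is removed (DeleteDatastoreFile_Task), with each vSphere task polled via wait_for_task until it reports completion. The sketch below mirrors that same call sequence with pyVmomi purely for illustration; it is not the suds-based oslo.vmware path that nova.virt.vmwareapi is actually using in these logs, and the vCenter host, credentials, and single-datacenter lookup are placeholder assumptions.

    # Illustrative pyVmomi sketch of the destroy sequence seen in the log:
    # power off -> unregister -> delete the VM's datastore folder.
    # Host/credentials are placeholders; error handling is omitted for brevity.
    import ssl
    from pyVim.connect import SmartConnect, Disconnect
    from pyVim.task import WaitForTask
    from pyVmomi import vim

    ctx = ssl._create_unverified_context()          # lab-only: skip cert verification
    si = SmartConnect(host="vc.example.test", user="admin",
                      pwd="secret", sslContext=ctx)
    content = si.RetrieveContent()

    # Look up the VM by its Nova instance UUID (value taken from the log above).
    vm = content.searchIndex.FindByUuid(None, "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df",
                                        vmSearch=True, instanceUuid=True)
    if vm is not None:
        if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
            WaitForTask(vm.PowerOffVM_Task())       # PowerOffVM_Task, as in the log

        # "[datastore2] <uuid>/<uuid>.vmx" -> "[datastore2] <uuid>"
        vm_folder = vm.config.files.vmPathName.rsplit("/", 1)[0]

        # Assumes a single-datacenter vCenter; adjust the lookup for multi-DC setups.
        datacenter = content.rootFolder.childEntity[0]

        vm.UnregisterVM()                           # remove from inventory, keep files
        WaitForTask(content.fileManager.DeleteDatastoreFile_Task(
            name=vm_folder, datacenter=datacenter)) # then delete the datastore folder
    Disconnect(si)

WaitForTask here plays the same role as oslo.vmware's wait_for_task in the log: it polls the task object until vSphere reports success, which is why the surrounding entries show "progress is 0%" followed later by "completed successfully" with a duration_secs value.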
[ 1244.543541] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1244.543726] env[68674]: DEBUG nova.compute.manager [-] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1244.543818] env[68674]: DEBUG nova.network.neutron [-] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1244.611021] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241112, 'name': ReconfigVM_Task, 'duration_secs': 0.175966} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.611425] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1244.612256] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf737a4c-e9e2-47cc-9566-2f27ce5b8ce2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.617319] env[68674]: DEBUG nova.network.neutron [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [{"id": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "address": "fa:16:3e:d2:2a:2b", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2b8729-9e", "ovs_interfaceid": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1244.637511] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21/30731a3c-34ba-40c8-9b8f-2d867eff4f21.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1244.638827] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57cf0e96-b790-421c-b7a6-cffca40beab4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.658845] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1244.658845] env[68674]: value = "task-3241114" [ 1244.658845] env[68674]: _type = "Task" [ 1244.658845] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.667566] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241114, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.928320] env[68674]: DEBUG oslo_vmware.api [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241113, 'name': ReconfigVM_Task, 'duration_secs': 0.346547} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.928609] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1244.933409] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77f97460-9987-471e-a307-aeb69f928ebf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.948473] env[68674]: DEBUG oslo_vmware.api [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1244.948473] env[68674]: value = "task-3241115" [ 1244.948473] env[68674]: _type = "Task" [ 1244.948473] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.956033] env[68674]: DEBUG oslo_vmware.api [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241115, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.050381] env[68674]: DEBUG nova.compute.manager [req-cf10f3a7-23f0-4ea0-9423-d0156005371a req-e64bda0a-32dd-4b90-97ce-299c41dd977d service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Received event network-vif-deleted-43bd7986-ab0b-4dd8-a224-a42cd649e0d0 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1245.050661] env[68674]: INFO nova.compute.manager [req-cf10f3a7-23f0-4ea0-9423-d0156005371a req-e64bda0a-32dd-4b90-97ce-299c41dd977d service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Neutron deleted interface 43bd7986-ab0b-4dd8-a224-a42cd649e0d0; detaching it from the instance and deleting it from the info cache [ 1245.050796] env[68674]: DEBUG nova.network.neutron [req-cf10f3a7-23f0-4ea0-9423-d0156005371a req-e64bda0a-32dd-4b90-97ce-299c41dd977d service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.119803] env[68674]: DEBUG oslo_concurrency.lockutils [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.168671] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241114, 'name': ReconfigVM_Task, 'duration_secs': 0.27034} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.168824] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21/30731a3c-34ba-40c8-9b8f-2d867eff4f21.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1245.169079] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1245.458294] env[68674]: DEBUG oslo_vmware.api [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241115, 'name': ReconfigVM_Task, 'duration_secs': 0.137486} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.458567] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647724', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'name': 'volume-4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': '4e2f6062-a545-44e7-8eff-1758bf08c9e3', 'serial': '4e2f6062-a545-44e7-8eff-1758bf08c9e3'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1245.530815] env[68674]: DEBUG nova.network.neutron [-] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.553562] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f185fe1-2166-4c42-b3b0-88b1e975aa34 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.564046] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ede838-4b03-4e8e-bd7f-26423b24fda5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.594284] env[68674]: DEBUG nova.compute.manager [req-cf10f3a7-23f0-4ea0-9423-d0156005371a req-e64bda0a-32dd-4b90-97ce-299c41dd977d service nova] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Detach interface failed, port_id=43bd7986-ab0b-4dd8-a224-a42cd649e0d0, reason: Instance 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df could not be found. 
{{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1245.646800] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5946e29f-24d2-4d38-9aa8-243e8035743b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.666039] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bff315b-d083-4fca-98c2-c6badb0851d2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.672870] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1245.679746] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09615a7c-ab5d-4e30-b923-2ff765d8b82b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.696274] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665c9545-c7cd-4190-af7f-5a4c0b3f1c4e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.713258] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1246.000429] env[68674]: DEBUG nova.objects.instance [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid f4751bd8-e0df-4686-a22f-e51a4a98b8d6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.033698] env[68674]: INFO nova.compute.manager [-] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Took 1.49 seconds to deallocate network for instance. [ 1246.181116] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1246.181611] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb9b009-642b-4aab-8dc7-9763948c8ca2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.190212] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1246.190212] env[68674]: value = "task-3241116" [ 1246.190212] env[68674]: _type = "Task" [ 1246.190212] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.202594] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.254358] env[68674]: DEBUG nova.network.neutron [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Port fa6ef6fe-e229-4cc2-8230-7318adaa728e binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1246.539627] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.539896] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.540147] env[68674]: DEBUG nova.objects.instance [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lazy-loading 'resources' on Instance uuid 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.699676] env[68674]: DEBUG oslo_vmware.api [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241116, 'name': PowerOnVM_Task, 'duration_secs': 0.389802} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.699945] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1246.700164] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-2cefa00c-e33c-46fe-8440-01486ce4cdfa tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance '2f698e5c-6be5-4747-b006-6ed6dd512f79' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1247.011035] env[68674]: DEBUG oslo_concurrency.lockutils [None req-b5eca98e-a218-441d-bdeb-905f517ddfea tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.238s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.032916] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.033174] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.130701] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742b9104-dbbf-46f2-86c7-799635a5118c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.138311] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbdd633-994c-43f6-9180-c665a86c790f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.169104] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daea410c-10c3-41eb-9eef-26afde57612f {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.175774] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f044f6c-88d2-4223-8b3d-4d30e03c1852 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.188695] env[68674]: DEBUG nova.compute.provider_tree [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed in ProviderTree 
for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.275152] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.275372] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.275546] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.536130] env[68674]: INFO nova.compute.manager [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Detaching volume e293d604-9044-4db3-88ad-4187491aa18a [ 1247.570510] env[68674]: INFO nova.virt.block_device [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Attempting to driver detach volume e293d604-9044-4db3-88ad-4187491aa18a from mountpoint /dev/sdc [ 1247.570762] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Volume detach. 
Driver type: vmdk {{(pid=68674) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1247.570954] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647726', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'name': 'volume-e293d604-9044-4db3-88ad-4187491aa18a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'serial': 'e293d604-9044-4db3-88ad-4187491aa18a'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1247.571849] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170e9f5a-0168-4d63-ae7e-3d2dd600ccdc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.593445] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d209a41-be0f-4a19-a52f-3991e432ab18 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.600152] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f36dc7-a3ba-47ae-9858-a4adc94cf204 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.619560] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a532c3f-4415-41d6-8e0e-84fc51689c51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.635166] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] The volume has not been displaced from its original location: [datastore2] volume-e293d604-9044-4db3-88ad-4187491aa18a/volume-e293d604-9044-4db3-88ad-4187491aa18a.vmdk. No consolidation needed. 
{{(pid=68674) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1247.640648] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfiguring VM instance instance-0000007a to detach disk 2002 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1247.640959] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5178a3c8-e2ae-43f2-bffc-7b47339154b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.671893] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1247.671893] env[68674]: value = "task-3241117" [ 1247.671893] env[68674]: _type = "Task" [ 1247.671893] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.671893] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241117, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.693024] env[68674]: DEBUG nova.scheduler.client.report [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.170066] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241117, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.199051] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.220632] env[68674]: INFO nova.scheduler.client.report [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Deleted allocations for instance 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df [ 1248.308664] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.308862] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.309086] env[68674]: DEBUG nova.network.neutron [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.671224] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241117, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.727780] env[68674]: DEBUG oslo_concurrency.lockutils [None req-814982ff-a3f0-468d-9a73-986078f5b63c tempest-AttachVolumeNegativeTest-4446692 tempest-AttachVolumeNegativeTest-4446692-project-member] Lock "1b276f5a-9e53-4ef9-892b-4e4bd0dc09df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.809s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.061678] env[68674]: DEBUG nova.network.neutron [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.171054] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241117, 'name': ReconfigVM_Task, 'duration_secs': 1.204147} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.171307] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Reconfigured VM instance instance-0000007a to detach disk 2002 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1249.175966] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b2a7680-c321-4c89-8eb8-8e4b28bc0e7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.190771] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1249.190771] env[68674]: value = "task-3241118" [ 1249.190771] env[68674]: _type = "Task" [ 1249.190771] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.198695] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241118, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.463731] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.464020] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.464238] env[68674]: DEBUG nova.compute.manager [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Going to confirm migration 8 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1249.564134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.701747] env[68674]: DEBUG oslo_vmware.api [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241118, 'name': ReconfigVM_Task, 'duration_secs': 0.139278} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.702063] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647726', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'name': 'volume-e293d604-9044-4db3-88ad-4187491aa18a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4751bd8-e0df-4686-a22f-e51a4a98b8d6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e293d604-9044-4db3-88ad-4187491aa18a', 'serial': 'e293d604-9044-4db3-88ad-4187491aa18a'} {{(pid=68674) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1250.028498] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.028694] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquired lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.028874] env[68674]: DEBUG nova.network.neutron [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1250.029106] env[68674]: DEBUG nova.objects.instance [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'info_cache' on Instance uuid 2f698e5c-6be5-4747-b006-6ed6dd512f79 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.088593] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d259f3-c620-4866-9eca-1ef6fd3e467d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.107815] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c36abaa-ab7a-4b1f-8be0-0327078a3a65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.114265] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1250.244987] env[68674]: DEBUG nova.objects.instance [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 
tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'flavor' on Instance uuid f4751bd8-e0df-4686-a22f-e51a4a98b8d6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.620716] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.620958] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86854f45-ff56-41af-8450-74c705a4d0cb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.628706] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1250.628706] env[68674]: value = "task-3241120" [ 1250.628706] env[68674]: _type = "Task" [ 1250.628706] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.636630] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.137519] env[68674]: DEBUG oslo_vmware.api [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241120, 'name': PowerOnVM_Task, 'duration_secs': 0.410572} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.139674] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.139861] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-03b25356-42e7-4e70-820c-af48d52fca65 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance '30731a3c-34ba-40c8-9b8f-2d867eff4f21' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1251.225131] env[68674]: DEBUG nova.network.neutron [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [{"id": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "address": "fa:16:3e:d2:2a:2b", "network": {"id": "14f41484-287c-4789-9e0c-fcc5f0e92e0d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-787923662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81afe76c94de4e94b53f15af0ef95e66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2b8729-9e", "ovs_interfaceid": "fe2b8729-9ef5-4a98-b38f-405833365c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.252173] env[68674]: DEBUG oslo_concurrency.lockutils [None req-1e379060-935b-4d47-8c73-5e08d98832d4 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.219s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.727578] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Releasing lock "refresh_cache-2f698e5c-6be5-4747-b006-6ed6dd512f79" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.727854] env[68674]: DEBUG nova.objects.instance [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lazy-loading 'migration_context' on Instance uuid 2f698e5c-6be5-4747-b006-6ed6dd512f79 
{{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1252.231321] env[68674]: DEBUG nova.objects.base [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Object Instance<2f698e5c-6be5-4747-b006-6ed6dd512f79> lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1252.232035] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ef910a-1cfe-4335-a957-6f2cf81db656 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.251519] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2a9c057-bfda-41f2-adde-b85f79983d6c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.256371] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1252.256371] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52605cda-e9c3-f854-8bbb-9a1bae27aa18" [ 1252.256371] env[68674]: _type = "Task" [ 1252.256371] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.263886] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52605cda-e9c3-f854-8bbb-9a1bae27aa18, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.446161] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.446428] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.446638] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.446867] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.447053] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.449182] env[68674]: INFO nova.compute.manager [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Terminating instance [ 1252.766310] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52605cda-e9c3-f854-8bbb-9a1bae27aa18, 'name': SearchDatastore_Task, 'duration_secs': 0.008831} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.766565] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.766836] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.952652] env[68674]: DEBUG nova.compute.manager [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1252.952855] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1252.953748] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52889cff-d4f9-45eb-82c3-a500cb6ad847 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.961747] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1252.961967] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb862053-60bc-470f-859f-73d0bd8ee4a3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.967939] env[68674]: DEBUG oslo_vmware.api [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1252.967939] env[68674]: value = "task-3241122" [ 1252.967939] env[68674]: _type = "Task" [ 1252.967939] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.975318] env[68674]: DEBUG oslo_vmware.api [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241122, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.128765] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.129071] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.129271] env[68674]: DEBUG nova.compute.manager [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Going to confirm migration 9 {{(pid=68674) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1253.356611] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d6a3d4-8904-49a6-8136-9c0541a7776e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.364121] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9701e4d1-3939-4d87-af1c-7220b00058ed {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.393886] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366367ae-9fef-486b-8f12-685f00e0a27d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.401192] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa8c26f-3404-4e2b-afef-4ed4213d0d73 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.414324] env[68674]: DEBUG nova.compute.provider_tree [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.476779] env[68674]: DEBUG oslo_vmware.api [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241122, 'name': PowerOffVM_Task, 'duration_secs': 0.220373} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.477061] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1253.477234] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1253.477471] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eedd401a-cc70-4cc5-a700-7e7a2790fb1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.544429] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1253.544644] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.544826] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Deleting the datastore file [datastore1] f4751bd8-e0df-4686-a22f-e51a4a98b8d6 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.545105] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a229d65f-e889-43dd-9e11-25b378c554cf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.551683] env[68674]: DEBUG oslo_vmware.api [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for the task: (returnval){ [ 1253.551683] env[68674]: value = "task-3241124" [ 1253.551683] env[68674]: _type = "Task" [ 1253.551683] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.559337] env[68674]: DEBUG oslo_vmware.api [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241124, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.700180] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.700395] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.700594] env[68674]: DEBUG nova.network.neutron [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1253.700792] env[68674]: DEBUG nova.objects.instance [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'info_cache' on Instance uuid 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1253.917198] env[68674]: DEBUG nova.scheduler.client.report [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1254.061782] env[68674]: DEBUG oslo_vmware.api [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Task: {'id': task-3241124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265133} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.062023] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.062244] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.062428] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.062604] env[68674]: INFO nova.compute.manager [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1254.062847] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1254.063052] env[68674]: DEBUG nova.compute.manager [-] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1254.063152] env[68674]: DEBUG nova.network.neutron [-] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1254.502542] env[68674]: DEBUG nova.compute.manager [req-7cd287ea-ef1a-4c48-b0f3-9d2587bc332c req-2d72fcc4-cc3e-4321-ae62-64d7073c2eb4 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Received event network-vif-deleted-393444bd-993f-4249-b58b-e01020f12db7 {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1254.502820] env[68674]: INFO nova.compute.manager [req-7cd287ea-ef1a-4c48-b0f3-9d2587bc332c req-2d72fcc4-cc3e-4321-ae62-64d7073c2eb4 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Neutron deleted interface 393444bd-993f-4249-b58b-e01020f12db7; detaching it from the instance and deleting it from the info cache [ 1254.502964] env[68674]: DEBUG nova.network.neutron [req-7cd287ea-ef1a-4c48-b0f3-9d2587bc332c req-2d72fcc4-cc3e-4321-ae62-64d7073c2eb4 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.911929] env[68674]: DEBUG nova.network.neutron [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [{"id": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "address": "fa:16:3e:6c:f7:a2", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa6ef6fe-e2", "ovs_interfaceid": "fa6ef6fe-e229-4cc2-8230-7318adaa728e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.926395] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" 
:: held 2.159s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.981531] env[68674]: DEBUG nova.network.neutron [-] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.005267] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b851541-dae4-41c1-ba94-7c1e306ca18e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.016082] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e35cfbe-8cef-44f0-9214-c42bb364320b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.043166] env[68674]: DEBUG nova.compute.manager [req-7cd287ea-ef1a-4c48-b0f3-9d2587bc332c req-2d72fcc4-cc3e-4321-ae62-64d7073c2eb4 service nova] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Detach interface failed, port_id=393444bd-993f-4249-b58b-e01020f12db7, reason: Instance f4751bd8-e0df-4686-a22f-e51a4a98b8d6 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1255.414887] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-30731a3c-34ba-40c8-9b8f-2d867eff4f21" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.415206] env[68674]: DEBUG nova.objects.instance [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'migration_context' on Instance uuid 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.484529] env[68674]: INFO nova.compute.manager [-] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Took 1.42 seconds to deallocate network for instance. 
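The trace above is dominated by one pattern: a vCenter task (ReconfigVM_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) is submitted, and oslo_vmware.api then polls it, logging "progress is N%." until "completed successfully.". The snippet below is a minimal, self-contained sketch of that poll loop under stated assumptions: it does not use oslo.vmware itself, and TaskInfo plus the get_task_info callable are hypothetical stand-ins for the vSphere SDK calls the real driver makes.

import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    # Hypothetical stand-in for the vSphere task info the driver polls;
    # the real SOAP calls wrapped by oslo.vmware are not reproduced here.
    state: str               # "running", "success" or "error"
    progress: int            # 0-100
    error: str | None = None


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mimicking the 'progress is N%.' /
    'completed successfully.' lines in the log above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)          # hypothetical SDK call
        if info.state == "success":
            print(f"Task {task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")


if __name__ == "__main__":
    # Tiny simulated task that advances 25% per poll, for illustration only.
    progress = {"n": 0}

    def fake_task_info(task_id):
        progress["n"] = min(progress["n"] + 25, 100)
        state = "success" if progress["n"] == 100 else "running"
        return TaskInfo(state=state, progress=progress["n"])

    wait_for_task(fake_task_info, "task-3241118", poll_interval=0.01)

The real wait_for_task in oslo.vmware additionally runs inside a looping-call helper and re-raises vCenter faults as typed exceptions; the sketch only reproduces the polling shape that the log records.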
[ 1255.489717] env[68674]: INFO nova.scheduler.client.report [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocation for migration 0a5b3e05-4b5a-459f-85f3-ddd65c04f099 [ 1255.918781] env[68674]: DEBUG nova.objects.base [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Object Instance<30731a3c-34ba-40c8-9b8f-2d867eff4f21> lazy-loaded attributes: info_cache,migration_context {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1255.919736] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdf78ba-3a56-4aad-a404-3a96f8304f00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.945354] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee3524cb-6eb7-43bb-adaf-78e96ef4fe7b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.952713] env[68674]: DEBUG oslo_vmware.api [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1255.952713] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523436ef-eb20-84bd-7d79-cc16796561d4" [ 1255.952713] env[68674]: _type = "Task" [ 1255.952713] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.964588] env[68674]: DEBUG oslo_vmware.api [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523436ef-eb20-84bd-7d79-cc16796561d4, 'name': SearchDatastore_Task, 'duration_secs': 0.007085} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.965017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.965419] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.994681] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.995372] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.531s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.526440] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b251d606-c820-4ce9-bab8-bee36b3a4e58 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.534220] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98c7c48-a873-49b2-8852-6a8bfaeba902 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.563041] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24221962-ed7c-4fe3-8ab7-d9c3be521561 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.570197] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7736aad6-4ffe-4c6b-9d3a-3feeacfbf6dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.582928] env[68674]: DEBUG nova.compute.provider_tree [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.085672] env[68674]: DEBUG nova.scheduler.client.report [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider 
ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1257.486824] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.487152] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.487373] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.487560] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.487739] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.489819] env[68674]: INFO nova.compute.manager [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Terminating instance [ 1257.993768] env[68674]: DEBUG nova.compute.manager [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Start destroying the instance on the hypervisor. 
{{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1257.993980] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1257.995387] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53957d66-925f-4f19-8248-5f3d57704b66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.002929] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1258.003166] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f9f5f2b-b734-49ac-a1e6-1f0d740f0295 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.008763] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1258.008763] env[68674]: value = "task-3241126" [ 1258.008763] env[68674]: _type = "Task" [ 1258.008763] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.016240] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241126, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.096967] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.131s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.099787] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.105s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.100014] env[68674]: DEBUG nova.objects.instance [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lazy-loading 'resources' on Instance uuid f4751bd8-e0df-4686-a22f-e51a4a98b8d6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.523097] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241126, 'name': PowerOffVM_Task, 'duration_secs': 0.198787} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.523372] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.523543] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1258.523781] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a948f91c-7285-432a-bf44-5445c60fe361 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.585051] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1258.585197] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1258.585270] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f58eaf2-7832-4753-818b-170a6f03f99f 
tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleting the datastore file [datastore2] 2f698e5c-6be5-4747-b006-6ed6dd512f79 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1258.585799] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d7bb7aa-ab4d-4044-87b9-84e4017cd1ce {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.591861] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for the task: (returnval){ [ 1258.591861] env[68674]: value = "task-3241128" [ 1258.591861] env[68674]: _type = "Task" [ 1258.591861] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.599284] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.659698] env[68674]: INFO nova.scheduler.client.report [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted allocation for migration 6ce6bc59-5dc9-4a62-87aa-05653c67fb99 [ 1258.670022] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dce7c7c-c637-4cdc-b1ff-596bbf76dbab {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.676795] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e178b87-9f85-4b8a-8e33-539a1d874ac4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.709173] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcccf1b-7868-4be7-8d0b-4f3a1f318ba8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.716706] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc08d05a-3e3d-4f63-be31-3617ba854fe3 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.730535] env[68674]: DEBUG nova.compute.provider_tree [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.007316] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1259.101669] env[68674]: DEBUG oslo_vmware.api [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Task: {'id': task-3241128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162414} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.101992] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1259.102123] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1259.102310] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1259.102565] env[68674]: INFO nova.compute.manager [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1259.102840] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1259.103088] env[68674]: DEBUG nova.compute.manager [-] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1259.103216] env[68674]: DEBUG nova.network.neutron [-] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1259.165891] env[68674]: DEBUG oslo_concurrency.lockutils [None req-0b19d461-7496-4d03-b25e-0157fa1d766b tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.037s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.170303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.160s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.170303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.170303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.170303] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.170303] env[68674]: INFO nova.compute.manager [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Terminating instance [ 1259.233556] env[68674]: DEBUG nova.scheduler.client.report [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.490868] env[68674]: DEBUG nova.compute.manager [req-e7451161-4fd6-4665-bc03-0dea8515bc17 req-70de5241-f376-4128-957f-f003d86f32cb service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Received event network-vif-deleted-fe2b8729-9ef5-4a98-b38f-405833365c1c {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1259.491089] env[68674]: INFO nova.compute.manager [req-e7451161-4fd6-4665-bc03-0dea8515bc17 req-70de5241-f376-4128-957f-f003d86f32cb service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Neutron deleted interface fe2b8729-9ef5-4a98-b38f-405833365c1c; detaching it from the instance and deleting it from the info cache [ 1259.491217] env[68674]: DEBUG nova.network.neutron [req-e7451161-4fd6-4665-bc03-0dea8515bc17 req-70de5241-f376-4128-957f-f003d86f32cb service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.673050] env[68674]: DEBUG nova.compute.manager [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1259.673288] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1259.674241] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d204a9-af92-4a6d-8286-29755783d2ae {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.682365] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1259.682605] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd2abe3c-b5ae-457b-b772-52f50fd8f50c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.690310] env[68674]: DEBUG oslo_vmware.api [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1259.690310] env[68674]: value = "task-3241129" [ 1259.690310] env[68674]: _type = "Task" [ 1259.690310] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.697298] env[68674]: DEBUG oslo_vmware.api [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.738431] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.758145] env[68674]: INFO nova.scheduler.client.report [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Deleted allocations for instance f4751bd8-e0df-4686-a22f-e51a4a98b8d6 [ 1259.972527] env[68674]: DEBUG nova.network.neutron [-] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.994132] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a011b9f7-1720-49e0-ae58-a2c850a8e6bd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.005785] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c77107a-2c8a-4d07-be28-31b380061c69 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.035202] env[68674]: DEBUG nova.compute.manager [req-e7451161-4fd6-4665-bc03-0dea8515bc17 req-70de5241-f376-4128-957f-f003d86f32cb service nova] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Detach interface failed, port_id=fe2b8729-9ef5-4a98-b38f-405833365c1c, reason: Instance 2f698e5c-6be5-4747-b006-6ed6dd512f79 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1260.198484] env[68674]: DEBUG oslo_vmware.api [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241129, 'name': PowerOffVM_Task, 'duration_secs': 0.236293} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.198886] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1260.199149] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1260.199438] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9989c13b-909e-4e64-97f9-671efb71d2a1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.269624] env[68674]: DEBUG oslo_concurrency.lockutils [None req-6888b700-3626-4ba6-96b0-7e23fb274f17 tempest-AttachVolumeTestJSON-1348113896 tempest-AttachVolumeTestJSON-1348113896-project-member] Lock "f4751bd8-e0df-4686-a22f-e51a4a98b8d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.823s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.271356] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1260.271556] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Deleting contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1260.271736] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleting the datastore file [datastore1] 30731a3c-34ba-40c8-9b8f-2d867eff4f21 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1260.271977] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-273906c3-c190-4e0b-ad62-37ec1dcbd3b9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.278631] env[68674]: DEBUG oslo_vmware.api [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1260.278631] env[68674]: value = "task-3241131" [ 1260.278631] env[68674]: _type = "Task" [ 1260.278631] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.288417] env[68674]: DEBUG oslo_vmware.api [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.477935] env[68674]: INFO nova.compute.manager [-] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Took 1.37 seconds to deallocate network for instance. [ 1260.789478] env[68674]: DEBUG oslo_vmware.api [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157804} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.789750] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1260.789937] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Deleted contents of the VM from datastore datastore1 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1260.790138] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1260.790318] env[68674]: INFO nova.compute.manager [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1260.790577] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1260.791129] env[68674]: DEBUG nova.compute.manager [-] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1260.791235] env[68674]: DEBUG nova.network.neutron [-] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1260.986050] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.986337] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.986528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.009855] env[68674]: INFO nova.scheduler.client.report [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Deleted allocations for instance 2f698e5c-6be5-4747-b006-6ed6dd512f79 [ 1261.520134] env[68674]: DEBUG oslo_concurrency.lockutils [None req-9f58eaf2-7832-4753-818b-170a6f03f99f tempest-DeleteServersTestJSON-214076679 tempest-DeleteServersTestJSON-214076679-project-member] Lock "2f698e5c-6be5-4747-b006-6ed6dd512f79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.033s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.522647] env[68674]: DEBUG nova.compute.manager [req-371a724a-08a8-4969-905e-b24712b432d1 req-7281de2f-11d2-4c66-a7c7-e2be84a3ec40 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Received event network-vif-deleted-fa6ef6fe-e229-4cc2-8230-7318adaa728e {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1261.522838] env[68674]: INFO nova.compute.manager [req-371a724a-08a8-4969-905e-b24712b432d1 req-7281de2f-11d2-4c66-a7c7-e2be84a3ec40 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Neutron deleted interface fa6ef6fe-e229-4cc2-8230-7318adaa728e; detaching it from the instance and deleting it from the info cache [ 1261.523018] env[68674]: DEBUG nova.network.neutron [req-371a724a-08a8-4969-905e-b24712b432d1 req-7281de2f-11d2-4c66-a7c7-e2be84a3ec40 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1261.751523] env[68674]: DEBUG nova.network.neutron [-] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.026062] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64f3cf95-10d4-4e7b-9dea-d06f9e3c1278 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.035496] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307cb57a-9fab-45f8-a293-6b796004ae26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.059986] env[68674]: DEBUG nova.compute.manager [req-371a724a-08a8-4969-905e-b24712b432d1 req-7281de2f-11d2-4c66-a7c7-e2be84a3ec40 service nova] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Detach interface failed, port_id=fa6ef6fe-e229-4cc2-8230-7318adaa728e, reason: Instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1262.254497] env[68674]: INFO nova.compute.manager [-] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Took 1.46 seconds to deallocate network for instance. [ 1262.761161] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.761490] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.761658] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.781813] env[68674]: INFO nova.scheduler.client.report [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted allocations for instance 30731a3c-34ba-40c8-9b8f-2d867eff4f21 [ 1263.290023] env[68674]: DEBUG oslo_concurrency.lockutils [None req-79c040af-6982-48f7-b4ba-6f6e0554f326 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "30731a3c-34ba-40c8-9b8f-2d867eff4f21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.123s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.005175] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 
tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.005439] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.508096] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Starting instance... {{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1265.036655] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.039071] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.039071] env[68674]: INFO nova.compute.claims [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1266.083016] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fe2693-4311-40ac-add8-ae49f1424476 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.089341] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0572d998-7a95-4184-a368-40165d8533fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.120217] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1b2d50-28d6-4976-9ee3-2ce68449bf26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.127518] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3c80eb-2886-428f-bc39-b0b7bb47fe51 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.140624] env[68674]: DEBUG nova.compute.provider_tree [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 
tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.644358] env[68674]: DEBUG nova.scheduler.client.report [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1267.149138] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.112s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.149726] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1267.654894] env[68674]: DEBUG nova.compute.utils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1267.656509] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Allocating IP information in the background. 
{{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1267.656573] env[68674]: DEBUG nova.network.neutron [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1267.704172] env[68674]: DEBUG nova.policy [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34b9ef7eae4a4cceba2fa699ce38ac0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f2a133c72064227bd419d63d5d9557f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1267.990573] env[68674]: DEBUG nova.network.neutron [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Successfully created port: c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1268.160411] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1269.170873] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1269.199203] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1269.199203] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.199203] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1269.199431] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.199555] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1269.199698] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1269.199936] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1269.200119] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1269.200297] env[68674]: DEBUG nova.virt.hardware [None 
req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1269.200462] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1269.200632] env[68674]: DEBUG nova.virt.hardware [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1269.201516] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8584826c-062c-49d6-89e6-9dd10b61a3ec {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.209280] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0197552-cbc8-414c-a90a-0378b04bf103 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.331594] env[68674]: DEBUG nova.compute.manager [req-d4bae613-59d1-4a0d-8252-ddef0aa5a19c req-ab3f0978-c861-40f2-b186-cdb182de0a0c service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Received event network-vif-plugged-c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1269.331826] env[68674]: DEBUG oslo_concurrency.lockutils [req-d4bae613-59d1-4a0d-8252-ddef0aa5a19c req-ab3f0978-c861-40f2-b186-cdb182de0a0c service nova] Acquiring lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.332030] env[68674]: DEBUG oslo_concurrency.lockutils [req-d4bae613-59d1-4a0d-8252-ddef0aa5a19c req-ab3f0978-c861-40f2-b186-cdb182de0a0c service nova] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.332208] env[68674]: DEBUG oslo_concurrency.lockutils [req-d4bae613-59d1-4a0d-8252-ddef0aa5a19c req-ab3f0978-c861-40f2-b186-cdb182de0a0c service nova] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.332431] env[68674]: DEBUG nova.compute.manager [req-d4bae613-59d1-4a0d-8252-ddef0aa5a19c req-ab3f0978-c861-40f2-b186-cdb182de0a0c service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] No waiting events found dispatching network-vif-plugged-c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1269.332574] env[68674]: WARNING nova.compute.manager [req-d4bae613-59d1-4a0d-8252-ddef0aa5a19c 
req-ab3f0978-c861-40f2-b186-cdb182de0a0c service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Received unexpected event network-vif-plugged-c2c63fe4-69d1-4322-981a-58d2974426aa for instance with vm_state building and task_state spawning. [ 1269.408025] env[68674]: DEBUG nova.network.neutron [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Successfully updated port: c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1269.910412] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.910412] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.910557] env[68674]: DEBUG nova.network.neutron [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.441802] env[68674]: DEBUG nova.network.neutron [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1270.552622] env[68674]: DEBUG nova.network.neutron [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.058995] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.058995] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Instance network_info: |[{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1271.058995] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:dc:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2c63fe4-69d1-4322-981a-58d2974426aa', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1271.066416] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1271.066789] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1271.067324] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51b61112-77d4-411e-b70e-e785aa5347f1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.088134] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1271.088134] env[68674]: value = "task-3241136" [ 1271.088134] env[68674]: _type = "Task" [ 1271.088134] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.096208] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241136, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.358272] env[68674]: DEBUG nova.compute.manager [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Received event network-changed-c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.358504] env[68674]: DEBUG nova.compute.manager [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Refreshing instance network info cache due to event network-changed-c2c63fe4-69d1-4322-981a-58d2974426aa. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1271.358736] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.358886] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.359377] env[68674]: DEBUG nova.network.neutron [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Refreshing network info cache for port c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1271.598791] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241136, 'name': CreateVM_Task, 'duration_secs': 0.31341} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.599076] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1271.599654] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.600097] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.600426] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1271.600670] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc77ea50-361e-4826-a274-ae0eaf4b3633 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.605129] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1271.605129] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f3f4c3-b3b9-24e1-bd18-bba12ee0c03d" [ 1271.605129] env[68674]: _type = "Task" [ 1271.605129] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.613263] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f3f4c3-b3b9-24e1-bd18-bba12ee0c03d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.118416] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52f3f4c3-b3b9-24e1-bd18-bba12ee0c03d, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.118733] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.118975] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1272.119226] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.119381] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.119615] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1272.119829] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e3492e5-0921-4b94-a8b8-2f61bcd4ddb1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.128701] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1272.128888] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1272.129632] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b38fe4db-24de-45fd-9df5-040cd39c0787 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.137416] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1272.137416] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5284cd33-275f-dd80-78d1-26952614fca7" [ 1272.137416] env[68674]: _type = "Task" [ 1272.137416] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.145115] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5284cd33-275f-dd80-78d1-26952614fca7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.176256] env[68674]: DEBUG nova.network.neutron [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updated VIF entry in instance network info cache for port c2c63fe4-69d1-4322-981a-58d2974426aa. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.176835] env[68674]: DEBUG nova.network.neutron [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.648830] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5284cd33-275f-dd80-78d1-26952614fca7, 'name': SearchDatastore_Task, 'duration_secs': 0.009051} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.649610] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3965485-44ed-4b5c-9777-8eb5652a240d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.654759] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1272.654759] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5211f63a-3fd2-fde6-bc13-a86cbd8f9a50" [ 1272.654759] env[68674]: _type = "Task" [ 1272.654759] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.663118] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5211f63a-3fd2-fde6-bc13-a86cbd8f9a50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.679736] env[68674]: DEBUG oslo_concurrency.lockutils [req-a9683331-7e8e-44f5-8af1-5f1842917459 req-60704a61-6ec1-47b9-9649-824040a50939 service nova] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.165916] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]5211f63a-3fd2-fde6-bc13-a86cbd8f9a50, 'name': SearchDatastore_Task, 'duration_secs': 0.009818} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.166212] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.166486] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1273.166744] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ca1c0a1-2118-44f6-9ed9-9829e5fdbb1e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.173708] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1273.173708] env[68674]: value = "task-3241137" [ 1273.173708] env[68674]: _type = "Task" [ 1273.173708] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.181050] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.683965] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473897} completed successfully. 
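At 1273.166 the driver copies the cached image VMDK into the instance directory while holding a lock named after the cached path, so concurrent spawns from the same image do not race on the cache entry. A hedged sketch of that shape, combining an oslo.concurrency lock with the session pattern shown earlier; dc_ref and the datastore paths are hypothetical stand-ins:

from oslo_concurrency import lockutils

def copy_cached_image(session, dc_ref, cache_vmdk, instance_vmdk):
    # Serialize on the cached VMDK path, mirroring the
    # 'Releasing lock "[datastore2] devstack-image-cache_base/..."' entry.
    with lockutils.lock(cache_vmdk):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=cache_vmdk, sourceDatacenter=dc_ref,
            destName=instance_vmdk, destDatacenter=dc_ref)
        return session.wait_for_task(task)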
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.684298] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1273.684458] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1273.684708] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46c61b2f-8d42-4431-a17f-c26d72ba1781 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.690578] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1273.690578] env[68674]: value = "task-3241138" [ 1273.690578] env[68674]: _type = "Task" [ 1273.690578] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.697706] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241138, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.200102] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060896} completed successfully. 
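The "Extending root virtual disk to 1048576" entry at 1273.684 is the flavor's root_gb expressed in KB: 1 GiB * 1024 * 1024 = 1048576 KB. A short sketch of that conversion and the matching ExtendVirtualDisk_Task call; session, dc_ref and the VMDK path are hypothetical:

def extend_root_disk(session, dc_ref, vmdk_path, root_gb):
    # root_gb=1 -> 1048576 KB, the value logged above.
    new_capacity_kb = root_gb * 1024 * 1024
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=vmdk_path, datacenter=dc_ref,
        newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(task)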
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.200386] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1274.201160] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec39bf2-6218-44ab-814d-58947e150826 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.222987] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1274.223211] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c6fdc42-511d-4c9d-bf95-781c82b6cd2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.242515] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1274.242515] env[68674]: value = "task-3241139" [ 1274.242515] env[68674]: _type = "Task" [ 1274.242515] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.250277] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.752052] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241139, 'name': ReconfigVM_Task, 'duration_secs': 0.286176} completed successfully. 
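Attaching the copied VMDK at 1274.222 is a ReconfigVM_Task carrying a VirtualMachineConfigSpec with one "add device" entry for a VirtualDisk. A hedged sketch of building such a spec through the SOAP client factory exposed by the session; vm_ref, controller_key and unit_number are hypothetical placeholders, and the real driver fills in more fields than shown here:

# Rough shape of the reconfigure used to attach the instance VMDK.
# vm_ref, controller_key and unit_number are hypothetical.
def attach_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number,
                disk_type='sparse'):
    factory = session.vim.client.factory

    backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path
    backing.diskMode = 'persistent'
    backing.thinProvisioned = (disk_type == 'thin')

    disk = factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100  # negative key: let vCenter assign the real one

    device_change = factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'add'
    device_change.device = disk

    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)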
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.752414] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfigured VM instance instance-0000007e to attach disk [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1274.752990] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e301fcf8-8bfd-4d26-be9b-b623f7a50fa1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.759380] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1274.759380] env[68674]: value = "task-3241140" [ 1274.759380] env[68674]: _type = "Task" [ 1274.759380] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.768764] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241140, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.268784] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241140, 'name': Rename_Task, 'duration_secs': 0.15296} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.269055] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1275.269286] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fae17b20-25b0-4259-bbf7-f7f5521d0be2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.274962] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1275.274962] env[68674]: value = "task-3241141" [ 1275.274962] env[68674]: _type = "Task" [ 1275.274962] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.283141] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.785220] env[68674]: DEBUG oslo_vmware.api [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241141, 'name': PowerOnVM_Task, 'duration_secs': 0.456838} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.785570] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1275.785703] env[68674]: INFO nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1275.785879] env[68674]: DEBUG nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1275.786647] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74798aa3-c6fe-4be4-956e-51ab96a7f50a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.307466] env[68674]: INFO nova.compute.manager [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Took 11.29 seconds to build instance. [ 1276.396135] env[68674]: DEBUG nova.compute.manager [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Received event network-changed-c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1276.396340] env[68674]: DEBUG nova.compute.manager [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Refreshing instance network info cache due to event network-changed-c2c63fe4-69d1-4322-981a-58d2974426aa. 
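The network-changed event at 1276.396 arrives from Neutron through Nova's external-events API, and the handler serializes cache refreshes per instance by taking the "refresh_cache-<instance uuid>" lock seen in the surrounding entries before rebuilding the network info. A minimal sketch of that locking shape; refresh_network_info() is a hypothetical placeholder:

from oslo_concurrency import lockutils

def refresh_network_info(instance_uuid, port_id):
    # Placeholder: in Nova this queries Neutron and updates the
    # instance_info_cache, producing the network_info dumps above.
    print('refreshing %s for port %s' % (instance_uuid, port_id))

def handle_network_changed(instance_uuid, port_id):
    # Mirrors the 'Acquiring/Acquired/Releasing lock
    # "refresh_cache-<uuid>"' entries around the cache rebuild.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_network_info(instance_uuid, port_id)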
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1276.396550] env[68674]: DEBUG oslo_concurrency.lockutils [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.396693] env[68674]: DEBUG oslo_concurrency.lockutils [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.396849] env[68674]: DEBUG nova.network.neutron [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Refreshing network info cache for port c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1276.809964] env[68674]: DEBUG oslo_concurrency.lockutils [None req-bb1f0386-12f3-4a77-b7ca-82b350921fc8 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.804s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.090648] env[68674]: DEBUG nova.network.neutron [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updated VIF entry in instance network info cache for port c2c63fe4-69d1-4322-981a-58d2974426aa. 
{{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1277.091027] env[68674]: DEBUG nova.network.neutron [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.594334] env[68674]: DEBUG oslo_concurrency.lockutils [req-a95789d6-c54d-42ef-889e-e9d0b14dee04 req-c166c881-c01a-4fff-93df-49249ed890b8 service nova] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.072656] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.072656] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.073088] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.073088] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.073088] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.073263] env[68674]: DEBUG 
oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.073312] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.073444] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1296.073593] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.577351] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.577620] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.577791] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.577945] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1296.578843] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81f037c-975f-4f8a-8683-eed3ce9b0ac6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.586914] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f07a816-d791-4554-a9df-a577d771b528 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.600575] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f29246b-737c-4c66-b249-3d185ca43937 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.606509] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b463a32b-fea6-4b0e-907f-7eb38950f6dc {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.636545] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180373MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1296.636671] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.636870] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.661592] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance b69391a0-57b0-469a-ac86-7f1fd3e1fad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1297.661838] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1297.661963] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1297.686910] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3934f81a-0950-478f-ab1d-55a5fadef57b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.694746] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73766523-13f9-4f09-bf70-9e417cdbf777 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.723893] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b3bef1-0077-44b1-a174-5ab03de228b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.731097] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd69a2d3-3349-4cd4-a93d-babcaa26fd4e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.744752] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.248117] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1298.752872] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1298.753254] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.002687] env[68674]: DEBUG nova.compute.manager [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Stashing vm_state: active {{(pid=68674) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1314.523056] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.523399] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.029501] env[68674]: INFO nova.compute.claims [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1315.535052] env[68674]: INFO nova.compute.resource_tracker [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating resource usage from migration b5882274-d308-4404-8e42-6f6c5c509cb2 [ 1315.571864] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3871a6d-1f92-48b2-ad32-1043e61d45af {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.579291] env[68674]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7e2300-1385-4382-acab-7f202d61bad9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.249193] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e7f1cd-19c6-4e29-9ba9-7f0d2a2c4372 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.257095] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f4aa9e-7774-4b9c-865a-4e7e9f4226bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.270047] env[68674]: DEBUG nova.compute.provider_tree [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.772989] env[68674]: DEBUG nova.scheduler.client.report [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1317.277702] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.754s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.277968] env[68674]: INFO nova.compute.manager [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Migrating [ 1317.791687] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.791940] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.792088] env[68674]: DEBUG nova.network.neutron [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Building 
network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1318.544595] env[68674]: DEBUG nova.network.neutron [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.047869] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.563506] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5724286e-6a41-41e0-8eb9-9be146253ce7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.583112] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 0 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1321.089032] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1321.089249] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0827bc4e-bc9f-48bb-b03a-9ab35be67f88 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.096129] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca 
tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1321.096129] env[68674]: value = "task-3241142" [ 1321.096129] env[68674]: _type = "Task" [ 1321.096129] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.103706] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.606057] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241142, 'name': PowerOffVM_Task, 'duration_secs': 0.176622} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.606443] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.606500] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 17 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1322.113362] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1322.113610] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1322.113767] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1322.113950] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1322.114111] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1322.114259] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1322.114464] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1322.114622] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1322.114791] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1322.114964] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1322.115147] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1322.120195] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0890487d-31af-4282-879e-7d2b3fcfb688 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.136529] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1322.136529] env[68674]: value = "task-3241143" [ 1322.136529] env[68674]: _type = "Task" [ 1322.136529] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.144233] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241143, 'name': ReconfigVM_Task} progress is 5%. 
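The nova.virt.hardware lines above walk from flavor and image limits (all unset, logged as 0:0:0) to the single possible topology for one vCPU. A standalone sketch of that enumeration: list every (sockets, cores, threads) factorization of the vCPU count that fits the maxima, which for a 1-vCPU flavor collapses to 1:1:1:

# Standalone sketch of the topology enumeration logged by
# nova.virt.hardware: factor vcpus into sockets*cores*threads within
# the given maxima (65536 each when flavor/image set no limits).
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavors in this log there is exactly one option:
print(possible_topologies(1))   # [(1, 1, 1)]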
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.646185] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241143, 'name': ReconfigVM_Task, 'duration_secs': 0.157213} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.646548] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 33 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1323.152804] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1323.153078] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1323.153245] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1323.153432] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1323.153582] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1323.153732] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1323.153938] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1323.154109] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1323.154278] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1323.154441] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1323.154633] env[68674]: DEBUG nova.virt.hardware [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1323.159960] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfiguring VM instance instance-0000007e to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1323.160256] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3db71129-0396-421f-a247-a1f9b8250e89 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.178174] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1323.178174] env[68674]: value = "task-3241144" [ 1323.178174] env[68674]: _type = "Task" [ 1323.178174] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.185656] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241144, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.687705] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241144, 'name': ReconfigVM_Task, 'duration_secs': 0.14972} completed successfully. 
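Immediately above, the resize path reconfigures instance-0000007e to detach disk 2000 before re-attaching the root VMDK with the new backing type. A hedged sketch of the "remove device, keep the file" half of that reconfigure; vm_ref and disk_device (the existing VirtualDisk pulled from the VM's hardware list) are hypothetical:

# Device-change entry with operation 'remove' and no fileOperation,
# so the VMDK stays on the datastore for re-attachment.
def detach_disk_keep_file(session, vm_ref, disk_device):
    factory = session.vim.client.factory

    device_change = factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'
    device_change.device = disk_device   # no fileOperation => keep backing

    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)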
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.688069] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfigured VM instance instance-0000007e to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1323.688769] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c26f22c-4b38-4b1b-9898-1aea4c5fcd0d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.710709] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1323.711088] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fcad65c-4234-4a06-9334-8647a99440d9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.729299] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1323.729299] env[68674]: value = "task-3241145" [ 1323.729299] env[68674]: _type = "Task" [ 1323.729299] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.736643] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241145, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.239378] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241145, 'name': ReconfigVM_Task, 'duration_secs': 0.253442} completed successfully. 
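The inventory reported to placement twice in this section (unchanged at both the periodic audit and the resize claim) implies schedulable capacity as (total - reserved) * allocation_ratio, with max_unit capping what a single instance may request. A quick check of those numbers:

# Effective capacity implied by the inventory dumps in this log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'max_unit': 120},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: %.0f schedulable (max %s per instance)'
          % (rc, capacity, inv['max_unit']))
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400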
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.239618] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfigured VM instance instance-0000007e to attach disk [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1324.239879] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 50 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1324.746886] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f210178-19fe-43f7-b33d-03c49c721916 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.765680] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc82ed3b-25dc-4b72-992b-0780a41cb777 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.782627] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 67 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1325.319924] env[68674]: DEBUG nova.network.neutron [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Port c2c63fe4-69d1-4322-981a-58d2974426aa binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1326.341330] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.341696] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.341739] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock 
"b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.375295] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.375571] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.375680] env[68674]: DEBUG nova.network.neutron [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.062204] env[68674]: DEBUG nova.network.neutron [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.565071] env[68674]: DEBUG oslo_concurrency.lockutils [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.089848] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-00f7f631-4c70-4fb7-8e79-5a4be15e3db8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.108189] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62653bd1-7002-4992-9ea4-ba075b5a6245 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.114766] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 83 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1329.621148] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1329.621511] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb726ad-aea7-4412-b80a-71de612dc0c8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.628844] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1329.628844] env[68674]: value = "task-3241146" [ 1329.628844] env[68674]: _type = "Task" [ 1329.628844] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.636402] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241146, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.141932] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241146, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.638430] env[68674]: DEBUG oslo_vmware.api [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241146, 'name': PowerOnVM_Task, 'duration_secs': 0.539234} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.638839] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1330.638935] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-3d6f4253-052e-4bd2-95f5-7feaf7aff1ca tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance 'b69391a0-57b0-469a-ac86-7f1fd3e1fad6' progress to 100 {{(pid=68674) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1333.473592] env[68674]: DEBUG nova.network.neutron [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Port c2c63fe4-69d1-4322-981a-58d2974426aa binding to destination host cpu-1 is already ACTIVE {{(pid=68674) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1333.473908] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.474087] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1333.474254] env[68674]: DEBUG nova.network.neutron [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1334.208564] env[68674]: DEBUG nova.network.neutron [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.711219] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.214730] env[68674]: DEBUG nova.compute.manager [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68674) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1335.214979] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.215251] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.718799] env[68674]: DEBUG nova.objects.instance [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'migration_context' on Instance uuid b69391a0-57b0-469a-ac86-7f1fd3e1fad6 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1336.265472] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c0329d-2823-4723-aaac-01d4bbf0ddd4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.273808] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4605b81c-4094-460b-984b-a7264d4b244b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.303766] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1360adbd-26e7-4096-bdba-c51feb8b10c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.310702] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fa07e8-b06b-4acd-998b-e33ab4199134 {{(pid=68674) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.323230] env[68674]: DEBUG nova.compute.provider_tree [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.826453] env[68674]: DEBUG nova.scheduler.client.report [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1337.838017] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.622s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.371950] env[68674]: INFO nova.compute.manager [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Swapping old allocation on dict_keys(['ade3f042-7427-494b-9654-0b65e074850c']) held by migration b5882274-d308-4404-8e42-6f6c5c509cb2 for instance [ 1339.391800] env[68674]: DEBUG nova.scheduler.client.report [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Overwriting current allocation {'allocations': {'ade3f042-7427-494b-9654-0b65e074850c': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 187}}, 'project_id': '6f2a133c72064227bd419d63d5d9557f', 'user_id': '34b9ef7eae4a4cceba2fa699ce38ac0d', 'consumer_generation': 1} on consumer b69391a0-57b0-469a-ac86-7f1fd3e1fad6 {{(pid=68674) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1339.472111] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.472324] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1339.472507] env[68674]: DEBUG nova.network.neutron [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.368934] env[68674]: DEBUG nova.network.neutron [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [{"id": "c2c63fe4-69d1-4322-981a-58d2974426aa", "address": "fa:16:3e:50:dc:00", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2c63fe4-69", "ovs_interfaceid": "c2c63fe4-69d1-4322-981a-58d2974426aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.872335] env[68674]: DEBUG oslo_concurrency.lockutils [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-b69391a0-57b0-469a-ac86-7f1fd3e1fad6" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1340.872790] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1340.873096] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52e68d81-3e5b-4cdc-a01f-12809dcd6c36 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.881136] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1340.881136] env[68674]: value = "task-3241147" [ 1340.881136] env[68674]: _type = "Task" [ 1340.881136] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.888819] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241147, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.390717] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241147, 'name': PowerOffVM_Task, 'duration_secs': 0.198635} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.391050] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1341.391781] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1341.392024] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.392188] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1341.392376] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.392525] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1341.392680] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1341.392884] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1341.393055] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1341.393228] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1341.393392] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1341.393571] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1341.398502] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6a91830-459e-4e07-b8ac-ec7a82825ac8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.414421] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1341.414421] env[68674]: value = "task-3241148" [ 1341.414421] env[68674]: _type = "Task" [ 1341.414421] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.421979] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.924617] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241148, 'name': ReconfigVM_Task, 'duration_secs': 0.146374} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.925544] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8178929a-8c46-4735-842f-74bf0299ed65 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.943512] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1341.943800] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.943987] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1341.944193] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.944342] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1341.944490] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1341.944694] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1341.944854] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1341.945035] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1341.945212] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1341.945390] env[68674]: DEBUG nova.virt.hardware [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1341.946254] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-031e6954-e32d-4f4d-be96-bc11bf3e32da {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.951642] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1341.951642] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523940bf-d116-c493-d2fd-77e90f88a074" [ 1341.951642] env[68674]: _type = "Task" [ 1341.951642] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.959204] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523940bf-d116-c493-d2fd-77e90f88a074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.461361] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]523940bf-d116-c493-d2fd-77e90f88a074, 'name': SearchDatastore_Task, 'duration_secs': 0.007105} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.466591] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfiguring VM instance instance-0000007e to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1342.466854] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1060f555-cd45-4aa5-8699-5f248ab03199 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.484561] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1342.484561] env[68674]: value = "task-3241149" [ 1342.484561] env[68674]: _type = "Task" [ 1342.484561] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.491698] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.994186] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241149, 'name': ReconfigVM_Task, 'duration_secs': 0.178223} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.994606] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfigured VM instance instance-0000007e to detach disk 2000 {{(pid=68674) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1342.995270] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc42db4-e38f-45fc-955b-afa1417ff65e {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.016315] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1343.016556] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a31fac5-105b-4544-b7b8-a915b95193fa {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.033150] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1343.033150] env[68674]: value = "task-3241150" [ 1343.033150] env[68674]: _type = "Task" [ 1343.033150] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.041520] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.543338] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241150, 'name': ReconfigVM_Task, 'duration_secs': 0.252731} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.543626] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Reconfigured VM instance instance-0000007e to attach disk [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6/b69391a0-57b0-469a-ac86-7f1fd3e1fad6.vmdk or device None with type thin {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1343.544469] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151f1cd8-0e40-4e42-a2b3-b0c9a96ef128 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.561600] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7211ef66-87d7-4e70-8c63-a82b62efb1f5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.578462] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3f6dce-883c-462e-8b25-5a8a25f4d3d0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.595299] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30965554-8a51-462e-9273-37b827106039 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.601408] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1343.601621] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2905c791-afb4-433d-b3f8-7c0a2db84246 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.607127] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1343.607127] env[68674]: value = "task-3241151" [ 1343.607127] env[68674]: _type = "Task" [ 1343.607127] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.614062] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.117856] env[68674]: DEBUG oslo_vmware.api [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241151, 'name': PowerOnVM_Task, 'duration_secs': 0.355131} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.118223] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1345.129200] env[68674]: INFO nova.compute.manager [None req-04490cb2-7085-48d0-90ff-6185c010dd1e tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance to original state: 'active' [ 1346.970298] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.970770] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.970770] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.970894] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.971101] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.973339] env[68674]: INFO nova.compute.manager [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Terminating instance [ 1347.477434] env[68674]: DEBUG nova.compute.manager [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Start destroying 
the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1347.477663] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1347.478620] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd88fb3b-b3b1-4c1c-ada7-22276c45ed66 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.486345] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1347.486575] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3abbb97-f6e5-427a-8dd6-20e55103239c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.493015] env[68674]: DEBUG oslo_vmware.api [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1347.493015] env[68674]: value = "task-3241152" [ 1347.493015] env[68674]: _type = "Task" [ 1347.493015] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.500422] env[68674]: DEBUG oslo_vmware.api [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241152, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.002912] env[68674]: DEBUG oslo_vmware.api [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241152, 'name': PowerOffVM_Task, 'duration_secs': 0.182537} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.003295] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1348.003363] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1348.003603] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56d28d03-ea1d-49ea-b4bd-bfca8a8e8a8b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.072779] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1348.073028] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1348.073225] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleting the datastore file [datastore2] b69391a0-57b0-469a-ac86-7f1fd3e1fad6 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1348.073536] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee09ac69-6265-4cdf-9b2b-07a0cf73fe26 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.079505] env[68674]: DEBUG oslo_vmware.api [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1348.079505] env[68674]: value = "task-3241154" [ 1348.079505] env[68674]: _type = "Task" [ 1348.079505] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.086786] env[68674]: DEBUG oslo_vmware.api [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241154, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.589098] env[68674]: DEBUG oslo_vmware.api [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143248} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.589356] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1348.589543] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1348.589718] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1348.589900] env[68674]: INFO nova.compute.manager [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1348.590155] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1348.590345] env[68674]: DEBUG nova.compute.manager [-] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1348.590437] env[68674]: DEBUG nova.network.neutron [-] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1349.011230] env[68674]: DEBUG nova.compute.manager [req-4688ed62-0a4c-47e2-a9c3-3023a3cce71d req-319723f9-6aec-4022-a2cd-b1853d9fbee5 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Received event network-vif-deleted-c2c63fe4-69d1-4322-981a-58d2974426aa {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1349.011555] env[68674]: INFO nova.compute.manager [req-4688ed62-0a4c-47e2-a9c3-3023a3cce71d req-319723f9-6aec-4022-a2cd-b1853d9fbee5 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Neutron deleted interface c2c63fe4-69d1-4322-981a-58d2974426aa; detaching it from the instance and deleting it from the info cache [ 1349.011623] env[68674]: DEBUG nova.network.neutron [req-4688ed62-0a4c-47e2-a9c3-3023a3cce71d req-319723f9-6aec-4022-a2cd-b1853d9fbee5 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.496565] env[68674]: DEBUG nova.network.neutron [-] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.514136] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-328b6c8c-6e98-4c5a-ab41-d641d9cf1f5c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.524277] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ec7605-27d0-478d-ba7d-24f139dfc2e9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.549066] env[68674]: DEBUG nova.compute.manager [req-4688ed62-0a4c-47e2-a9c3-3023a3cce71d req-319723f9-6aec-4022-a2cd-b1853d9fbee5 service nova] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Detach interface failed, port_id=c2c63fe4-69d1-4322-981a-58d2974426aa, reason: Instance b69391a0-57b0-469a-ac86-7f1fd3e1fad6 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1349.999096] env[68674]: INFO nova.compute.manager [-] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Took 1.41 seconds to deallocate network for instance. 
[ 1350.505678] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.506033] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.506211] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.528551] env[68674]: INFO nova.scheduler.client.report [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted allocations for instance b69391a0-57b0-469a-ac86-7f1fd3e1fad6 [ 1351.036528] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8fabc6ee-d7d2-41de-b598-1597cca48f90 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "b69391a0-57b0-469a-ac86-7f1fd3e1fad6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.066s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.796428] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1351.796676] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.299271] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Starting instance... 
{{(pid=68674) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1352.818764] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.818764] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.820087] env[68674]: INFO nova.compute.claims [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1353.854099] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1075fc2-e644-4dad-81c9-fcc421ab5fbf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.862052] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64626098-8d8e-4a55-a255-817b7b102222 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.891992] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19ec41a-67c3-494c-a027-e351b22d8bc8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.898643] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887ead11-36bb-4b34-8aab-8209939a7513 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.911219] env[68674]: DEBUG nova.compute.provider_tree [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.414040] env[68674]: DEBUG nova.scheduler.client.report [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1354.918950] env[68674]: DEBUG oslo_concurrency.lockutils [None 
req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.100s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.919548] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Start building networks asynchronously for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1355.424872] env[68674]: DEBUG nova.compute.utils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Using /dev/sd instead of None {{(pid=68674) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1355.426360] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Allocating IP information in the background. {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1355.426531] env[68674]: DEBUG nova.network.neutron [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] allocate_for_instance() {{(pid=68674) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1355.473762] env[68674]: DEBUG nova.policy [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34b9ef7eae4a4cceba2fa699ce38ac0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f2a133c72064227bd419d63d5d9557f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68674) authorize /opt/stack/nova/nova/policy.py:192}} [ 1355.718683] env[68674]: DEBUG nova.network.neutron [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Successfully created port: 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1355.930295] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Start building block device mappings for instance. {{(pid=68674) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1356.940729] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Start spawning the instance on the hypervisor. 
{{(pid=68674) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1356.967879] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:04:51Z,direct_url=,disk_format='vmdk',id=b84d9354-ef6b-46ca-9dae-6549fa89bbea,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='bbb86a948b114fbc93d96c17f472fc3e',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:04:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1356.968186] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.968501] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image limits 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1356.968571] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Flavor pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.968678] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Image pref 0:0:0 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1356.968826] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68674) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1356.969078] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1356.969269] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1356.969512] env[68674]: DEBUG nova.virt.hardware [None 
req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Got 1 possible topologies {{(pid=68674) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1356.969604] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1356.969772] env[68674]: DEBUG nova.virt.hardware [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68674) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1356.970725] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de0dce7-5c78-4084-a397-f654695a7c35 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.979163] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2855bd45-d217-4512-9ecc-a53ca93586f7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.061375] env[68674]: DEBUG nova.compute.manager [req-720d37bb-2c07-46d1-af3b-61e91eb00c80 req-42f50db8-f837-4dac-a83d-8ffa0e5f54b9 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Received event network-vif-plugged-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1357.061530] env[68674]: DEBUG oslo_concurrency.lockutils [req-720d37bb-2c07-46d1-af3b-61e91eb00c80 req-42f50db8-f837-4dac-a83d-8ffa0e5f54b9 service nova] Acquiring lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1357.061740] env[68674]: DEBUG oslo_concurrency.lockutils [req-720d37bb-2c07-46d1-af3b-61e91eb00c80 req-42f50db8-f837-4dac-a83d-8ffa0e5f54b9 service nova] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.061907] env[68674]: DEBUG oslo_concurrency.lockutils [req-720d37bb-2c07-46d1-af3b-61e91eb00c80 req-42f50db8-f837-4dac-a83d-8ffa0e5f54b9 service nova] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.062100] env[68674]: DEBUG nova.compute.manager [req-720d37bb-2c07-46d1-af3b-61e91eb00c80 req-42f50db8-f837-4dac-a83d-8ffa0e5f54b9 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] No waiting events found dispatching network-vif-plugged-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1357.062272] env[68674]: WARNING nova.compute.manager [req-720d37bb-2c07-46d1-af3b-61e91eb00c80 
req-42f50db8-f837-4dac-a83d-8ffa0e5f54b9 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Received unexpected event network-vif-plugged-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd for instance with vm_state building and task_state spawning. [ 1357.139106] env[68674]: DEBUG nova.network.neutron [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Successfully updated port: 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1357.641394] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.641564] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.641733] env[68674]: DEBUG nova.network.neutron [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1358.183358] env[68674]: DEBUG nova.network.neutron [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Instance cache missing network info. 
{{(pid=68674) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1358.304485] env[68674]: DEBUG nova.network.neutron [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [{"id": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "address": "fa:16:3e:ba:da:5a", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9bdfe8-9f", "ovs_interfaceid": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.754423] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.754653] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.806959] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.807279] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Instance network_info: |[{"id": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "address": "fa:16:3e:ba:da:5a", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9bdfe8-9f", "ovs_interfaceid": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68674) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1358.807679] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:da:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8868dc2-7767-49c0-a2ed-e611fcbf8414', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd', 'vif_model': 'vmxnet3'}] {{(pid=68674) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.815042] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.815238] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Creating VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1358.815445] env[68674]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-044c5bab-b25a-4247-99ab-0f9ac63f2520 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.834285] env[68674]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1358.834285] env[68674]: value = "task-3241155" [ 1358.834285] env[68674]: _type = "Task" [ 1358.834285] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.844147] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241155, 'name': CreateVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.086838] env[68674]: DEBUG nova.compute.manager [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Received event network-changed-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1359.087077] env[68674]: DEBUG nova.compute.manager [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Refreshing instance network info cache due to event network-changed-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1359.087350] env[68674]: DEBUG oslo_concurrency.lockutils [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] Acquiring lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.087508] env[68674]: DEBUG oslo_concurrency.lockutils [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] Acquired lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.087687] env[68674]: DEBUG nova.network.neutron [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Refreshing network info cache for port 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1359.259089] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.259335] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.259335] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.259464] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.259600] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.259719] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.259854] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1359.259996] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.343922] env[68674]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241155, 'name': CreateVM_Task, 'duration_secs': 0.311674} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.344072] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Created VM on the ESX host {{(pid=68674) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1359.344699] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.344868] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.345200] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1359.345440] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ba6208e-5df2-482c-adce-ae39cc62f27c {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.349678] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1359.349678] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f8032-6178-6ab2-14d9-82e176767f90" [ 1359.349678] env[68674]: _type = "Task" [ 1359.349678] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.356628] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f8032-6178-6ab2-14d9-82e176767f90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.756937] env[68674]: DEBUG nova.network.neutron [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updated VIF entry in instance network info cache for port 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.757336] env[68674]: DEBUG nova.network.neutron [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [{"id": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "address": "fa:16:3e:ba:da:5a", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9bdfe8-9f", "ovs_interfaceid": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.763677] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.763894] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.764069] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.764223] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1359.765043] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e12937-982c-4202-98b0-e9f5cf84e64d {{(pid=68674) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.773960] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdb32e2-adf2-4bae-b9d8-23e55cf85d76 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.788652] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65227df-6b7e-4c5e-b8b2-07e720e55905 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.794564] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597a0965-2261-4049-a77c-a51bafd3cf21 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.822603] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180794MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1359.822751] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.822967] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.858591] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]525f8032-6178-6ab2-14d9-82e176767f90, 'name': SearchDatastore_Task, 'duration_secs': 0.010934} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.858895] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.859158] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Processing image b84d9354-ef6b-46ca-9dae-6549fa89bbea {{(pid=68674) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1359.859397] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.859544] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.859718] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1359.859961] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf61dffd-fc0e-412a-b20c-614bb250f1dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.868322] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68674) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1359.868509] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68674) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1359.869210] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4585b89d-4f40-4a62-ac76-bc6a3fe7ef7a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.874046] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1359.874046] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52adeb6f-3f05-8c32-1520-e1b172022f8a" [ 1359.874046] env[68674]: _type = "Task" [ 1359.874046] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.881166] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52adeb6f-3f05-8c32-1520-e1b172022f8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.260066] env[68674]: DEBUG oslo_concurrency.lockutils [req-52582a93-73f9-4a2b-afd8-e371583bc358 req-51bc5f55-10f8-43e1-833a-515cfd976719 service nova] Releasing lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.384676] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52adeb6f-3f05-8c32-1520-e1b172022f8a, 'name': SearchDatastore_Task, 'duration_secs': 0.008168} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.385421] env[68674]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c58766f-9d1f-4113-a87f-7a09b9fd3350 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.390581] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1360.390581] env[68674]: value = "session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bec16d-c434-51c3-3da5-6b347d8cd45f" [ 1360.390581] env[68674]: _type = "Task" [ 1360.390581] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.397319] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bec16d-c434-51c3-3da5-6b347d8cd45f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.849543] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance cd3c4d97-180c-4942-9025-a0d3b4eceec8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.849774] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1360.849924] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1360.877191] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b459182b-bb90-46d4-afb8-b2011acde2b5 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.885294] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299328c4-6d68-4f14-9fd4-e5456e110a2a {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.918089] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed07a1f-cc00-45cf-b3ab-5735277be2b8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.925730] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': session[52aa61fe-caff-67e9-cef0-2d4146ba2177]52bec16d-c434-51c3-3da5-6b347d8cd45f, 'name': SearchDatastore_Task, 'duration_secs': 0.009718} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.927857] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.928182] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] cd3c4d97-180c-4942-9025-a0d3b4eceec8/cd3c4d97-180c-4942-9025-a0d3b4eceec8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1360.928467] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be47a6a8-e5fa-45c1-b083-1f0441eb939d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.931131] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c95f8e0-3eda-418e-be94-636261a1b734 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.944175] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.947223] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1360.947223] env[68674]: value = "task-3241156" [ 1360.947223] env[68674]: _type = "Task" [ 1360.947223] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.954858] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241156, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.447127] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1361.459345] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241156, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470355} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.459576] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b84d9354-ef6b-46ca-9dae-6549fa89bbea/b84d9354-ef6b-46ca-9dae-6549fa89bbea.vmdk to [datastore2] cd3c4d97-180c-4942-9025-a0d3b4eceec8/cd3c4d97-180c-4942-9025-a0d3b4eceec8.vmdk {{(pid=68674) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1361.459785] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Extending root virtual disk to 1048576 {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1361.460030] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-142cf3e3-f65f-469a-9f72-3f9a5d7fa84b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.467402] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1361.467402] env[68674]: value = "task-3241157" [ 1361.467402] env[68674]: _type = "Task" [ 1361.467402] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.474909] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241157, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.955598] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1361.955848] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.133s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.977104] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080463} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.977358] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Extended root virtual disk {{(pid=68674) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1361.978136] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38a5fb0-e357-4f36-8bd0-a225cb16a5f4 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.000446] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] cd3c4d97-180c-4942-9025-a0d3b4eceec8/cd3c4d97-180c-4942-9025-a0d3b4eceec8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1362.000699] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab1720ed-7b49-466a-ab90-d5d1e930cdf0 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.020144] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1362.020144] env[68674]: value = "task-3241158" [ 1362.020144] env[68674]: _type = "Task" [ 1362.020144] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.027499] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241158, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.529847] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241158, 'name': ReconfigVM_Task, 'duration_secs': 0.276632} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.530197] env[68674]: DEBUG nova.virt.vmwareapi.volumeops [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Reconfigured VM instance instance-0000007f to attach disk [datastore2] cd3c4d97-180c-4942-9025-a0d3b4eceec8/cd3c4d97-180c-4942-9025-a0d3b4eceec8.vmdk or device None with type sparse {{(pid=68674) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.530733] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95be97ba-8843-4003-9a13-2b79a17b5d64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.536644] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1362.536644] env[68674]: value = "task-3241159" [ 1362.536644] env[68674]: _type = "Task" [ 1362.536644] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.543708] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241159, 'name': Rename_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.046239] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241159, 'name': Rename_Task, 'duration_secs': 0.14133} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.046519] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1363.046760] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4389b147-e4cc-407d-8f3b-2993a3bdcfbb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.052976] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1363.052976] env[68674]: value = "task-3241160" [ 1363.052976] env[68674]: _type = "Task" [ 1363.052976] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.060202] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241160, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.563298] env[68674]: DEBUG oslo_vmware.api [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241160, 'name': PowerOnVM_Task, 'duration_secs': 0.453668} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.563685] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1363.563745] env[68674]: INFO nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1363.563931] env[68674]: DEBUG nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1363.564728] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1db5066-2791-40f5-8a29-498dcd91ad1b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.084738] env[68674]: INFO nova.compute.manager [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Took 11.28 seconds to build instance. [ 1364.587369] env[68674]: DEBUG oslo_concurrency.lockutils [None req-351254c2-3317-47d5-b4e7-a9fd03ec3d8f tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.790s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.796893] env[68674]: DEBUG nova.compute.manager [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Received event network-changed-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1364.797094] env[68674]: DEBUG nova.compute.manager [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Refreshing instance network info cache due to event network-changed-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd. 
{{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1364.797319] env[68674]: DEBUG oslo_concurrency.lockutils [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] Acquiring lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.797455] env[68674]: DEBUG oslo_concurrency.lockutils [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] Acquired lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.797615] env[68674]: DEBUG nova.network.neutron [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Refreshing network info cache for port 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1365.494891] env[68674]: DEBUG nova.network.neutron [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updated VIF entry in instance network info cache for port 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd. {{(pid=68674) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1365.495300] env[68674]: DEBUG nova.network.neutron [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [{"id": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "address": "fa:16:3e:ba:da:5a", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9bdfe8-9f", "ovs_interfaceid": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.997821] env[68674]: DEBUG oslo_concurrency.lockutils [req-9ee5a3f7-cb30-4d4a-a1e4-1dd20b364447 req-38a5fa67-71b4-4185-8f63-3714e84b2441 service nova] Releasing lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1401.748583] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd 
tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.749147] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.749147] env[68674]: DEBUG nova.compute.manager [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1401.750077] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7363f5b-4085-434a-a2d6-abbd9ed68b43 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.758053] env[68674]: DEBUG nova.compute.manager [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68674) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1401.758053] env[68674]: DEBUG nova.objects.instance [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'flavor' on Instance uuid cd3c4d97-180c-4942-9025-a0d3b4eceec8 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1402.765762] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.766148] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2164d0f3-30b7-4458-8274-c84f7041477d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.773605] env[68674]: DEBUG oslo_vmware.api [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1402.773605] env[68674]: value = "task-3241161" [ 1402.773605] env[68674]: _type = "Task" [ 1402.773605] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.781524] env[68674]: DEBUG oslo_vmware.api [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241161, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.283947] env[68674]: DEBUG oslo_vmware.api [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241161, 'name': PowerOffVM_Task, 'duration_secs': 0.201531} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.284240] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.284434] env[68674]: DEBUG nova.compute.manager [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1403.285193] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048502f4-0e2f-457d-8c22-d4e8f83d42e6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.796168] env[68674]: DEBUG oslo_concurrency.lockutils [None req-47eb9cb7-a00a-43b4-b29b-30a8b95b84fd tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.140171] env[68674]: DEBUG nova.objects.instance [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'flavor' on Instance uuid cd3c4d97-180c-4942-9025-a0d3b4eceec8 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1404.645663] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.645850] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.645999] env[68674]: DEBUG nova.network.neutron [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1404.646209] env[68674]: DEBUG nova.objects.instance [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 
tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'info_cache' on Instance uuid cd3c4d97-180c-4942-9025-a0d3b4eceec8 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.149766] env[68674]: DEBUG nova.objects.base [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68674) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1405.836687] env[68674]: DEBUG nova.network.neutron [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [{"id": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "address": "fa:16:3e:ba:da:5a", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9bdfe8-9f", "ovs_interfaceid": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.339715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.346463] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powering on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.346827] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48479139-16e8-4b68-a771-9456decbcd64 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.354380] env[68674]: DEBUG oslo_vmware.api [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1407.354380] env[68674]: value = "task-3241162" [ 1407.354380] env[68674]: _type = "Task" [ 1407.354380] env[68674]: } to complete. 
{{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.361647] env[68674]: DEBUG oslo_vmware.api [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.863287] env[68674]: DEBUG oslo_vmware.api [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241162, 'name': PowerOnVM_Task, 'duration_secs': 0.366529} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.863571] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powered on the VM {{(pid=68674) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1407.863731] env[68674]: DEBUG nova.compute.manager [None req-ffca3d30-c723-4889-8aec-9f61b10927ec tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1407.864471] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0213b9c0-2b2f-45be-a7a8-44649eee3605 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.874144] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85f6879-7b2c-4f17-872d-98530acd391d {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.881330] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Suspending the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1408.881557] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e5e837af-2b0f-43aa-94b0-c4f2ab938ca6 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.887336] env[68674]: DEBUG oslo_vmware.api [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1408.887336] env[68674]: value = "task-3241163" [ 1408.887336] env[68674]: _type = "Task" [ 1408.887336] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.894860] env[68674]: DEBUG oslo_vmware.api [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241163, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.399117] env[68674]: DEBUG oslo_vmware.api [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241163, 'name': SuspendVM_Task} progress is 70%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.899279] env[68674]: DEBUG oslo_vmware.api [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241163, 'name': SuspendVM_Task, 'duration_secs': 0.72634} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.899677] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Suspended the VM {{(pid=68674) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1409.899677] env[68674]: DEBUG nova.compute.manager [None req-eef0ac08-aa55-41b5-be67-ea135c4f6277 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1409.900416] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6549d939-b847-4f39-86ed-12a46feca709 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.184022] env[68674]: INFO nova.compute.manager [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Resuming [ 1411.184665] env[68674]: DEBUG nova.objects.instance [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'flavor' on Instance uuid cd3c4d97-180c-4942-9025-a0d3b4eceec8 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.366513] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.366842] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.366842] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.366990] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68674) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.367151] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.367294] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.367437] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.367571] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68674) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1412.367771] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager.update_available_resource {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.695797] env[68674]: DEBUG oslo_concurrency.lockutils [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.695997] env[68674]: DEBUG oslo_concurrency.lockutils [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquired lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1412.696140] env[68674]: DEBUG nova.network.neutron [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Building network info cache for instance {{(pid=68674) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.870403] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.870680] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.870869] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.871048] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68674) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1412.871906] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1198f02c-dd08-4af0-ab81-92961cb7ecbd {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.880140] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825c5f96-c229-46d3-b429-df0260df34b7 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.893290] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80708e8-3919-4920-9b19-78d92ea2e473 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.899246] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec1f460-14eb-473c-9e4f-9ccb2b6c5f22 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.928736] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180989MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=68674) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1412.928919] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.929122] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.380984] env[68674]: DEBUG nova.network.neutron [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [{"id": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "address": "fa:16:3e:ba:da:5a", "network": {"id": "f2a6b57a-fec9-4bd2-9828-2b72f21f2393", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1479923638-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "6f2a133c72064227bd419d63d5d9557f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8868dc2-7767-49c0-a2ed-e611fcbf8414", "external-id": "nsx-vlan-transportzone-158", "segmentation_id": 158, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9bdfe8-9f", "ovs_interfaceid": "2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.883715] env[68674]: DEBUG oslo_concurrency.lockutils [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Releasing lock "refresh_cache-cd3c4d97-180c-4942-9025-a0d3b4eceec8" {{(pid=68674) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1413.884683] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed0240c-480e-4d05-98cc-43120c522e25 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.891278] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Resuming the VM {{(pid=68674) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1413.891487] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecc0b065-fa23-4dca-93ac-4b93782df532 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.897036] env[68674]: DEBUG oslo_vmware.api [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1413.897036] env[68674]: value = "task-3241164" [ 1413.897036] env[68674]: _type = "Task" [ 1413.897036] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.904080] env[68674]: DEBUG oslo_vmware.api [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.020077] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Instance cd3c4d97-180c-4942-9025-a0d3b4eceec8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68674) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1414.020281] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1414.020426] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68674) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1414.117228] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Refreshing inventories for resource provider ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1414.130635] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Updating ProviderTree inventory for provider ade3f042-7427-494b-9654-0b65e074850c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1414.130829] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Updating inventory in ProviderTree for provider ade3f042-7427-494b-9654-0b65e074850c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1414.140984] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Refreshing aggregate associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, aggregates: None {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1414.157901] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Refreshing trait associations for resource provider ade3f042-7427-494b-9654-0b65e074850c, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68674) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1414.181747] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba7d843-ffcd-4bac-a959-21c876b6a68b {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.189303] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b1953a9-bee7-49c3-85be-ef8c5da385dc {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.219991] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40da010a-68b6-49e2-8aea-e9386b50e8fb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.228216] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc459de-f188-4fef-9995-31651fb6a5e8 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.242227] env[68674]: DEBUG nova.compute.provider_tree [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.407791] env[68674]: DEBUG oslo_vmware.api [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241164, 'name': PowerOnVM_Task, 'duration_secs': 0.474883} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.408173] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Resumed the VM {{(pid=68674) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1414.408246] env[68674]: DEBUG nova.compute.manager [None req-600e8271-f682-46db-9de8-dce72de5f671 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Checking state {{(pid=68674) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1414.409131] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5cfa26-f3f8-4271-aa1f-556d5e63c507 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.745527] env[68674]: DEBUG nova.scheduler.client.report [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1415.249893] env[68674]: DEBUG nova.compute.resource_tracker [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68674) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1415.250208] env[68674]: DEBUG oslo_concurrency.lockutils [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 
2.321s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1415.250514] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.250722] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Cleaning up deleted instances with incomplete migration {{(pid=68674) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1415.754137] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.849052] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.849272] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.849504] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.849662] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.849855] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1415.851821] env[68674]: INFO nova.compute.manager [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Terminating instance [ 1416.355647] 
env[68674]: DEBUG nova.compute.manager [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Start destroying the instance on the hypervisor. {{(pid=68674) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1416.355877] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Destroying instance {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1416.356891] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433dbd55-d06c-4b96-9f21-0eecbe58a640 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.364713] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powering off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1416.364943] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbb8bf59-d627-4d84-be74-df13afc790a2 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.370701] env[68674]: DEBUG oslo_vmware.api [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1416.370701] env[68674]: value = "task-3241165" [ 1416.370701] env[68674]: _type = "Task" [ 1416.370701] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.378258] env[68674]: DEBUG oslo_vmware.api [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.881928] env[68674]: DEBUG oslo_vmware.api [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241165, 'name': PowerOffVM_Task, 'duration_secs': 0.205771} completed successfully. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.882277] env[68674]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Powered off the VM {{(pid=68674) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1416.882316] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Unregistering the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1416.882614] env[68674]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ebd242b-33da-40a6-9405-c35d59ec9329 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.953114] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Unregistered the VM {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1416.953350] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Deleting contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1416.953624] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleting the datastore file [datastore2] cd3c4d97-180c-4942-9025-a0d3b4eceec8 {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1416.953930] env[68674]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6754315d-4a57-41eb-9545-3f4357f55012 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.960555] env[68674]: DEBUG oslo_vmware.api [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for the task: (returnval){ [ 1416.960555] env[68674]: value = "task-3241167" [ 1416.960555] env[68674]: _type = "Task" [ 1416.960555] env[68674]: } to complete. {{(pid=68674) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.968051] env[68674]: DEBUG oslo_vmware.api [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241167, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.470231] env[68674]: DEBUG oslo_vmware.api [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Task: {'id': task-3241167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145358} completed successfully. {{(pid=68674) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.470505] env[68674]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted the datastore file {{(pid=68674) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1417.470691] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Deleted contents of the VM from datastore datastore2 {{(pid=68674) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1417.470869] env[68674]: DEBUG nova.virt.vmwareapi.vmops [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Instance destroyed {{(pid=68674) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1417.471054] env[68674]: INFO nova.compute.manager [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1417.471295] env[68674]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68674) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1417.471488] env[68674]: DEBUG nova.compute.manager [-] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Deallocating network for instance {{(pid=68674) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1417.471581] env[68674]: DEBUG nova.network.neutron [-] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] deallocate_for_instance() {{(pid=68674) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1417.907744] env[68674]: DEBUG nova.compute.manager [req-d28c236d-4710-4020-895b-cdaff1ff126f req-d71e31a1-a44c-44c5-a401-02654a723451 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Received event network-vif-deleted-2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd {{(pid=68674) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1417.907744] env[68674]: INFO nova.compute.manager [req-d28c236d-4710-4020-895b-cdaff1ff126f req-d71e31a1-a44c-44c5-a401-02654a723451 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Neutron deleted interface 2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd; detaching it from the instance and deleting it from the info cache [ 1417.907744] env[68674]: DEBUG nova.network.neutron [req-d28c236d-4710-4020-895b-cdaff1ff126f req-d71e31a1-a44c-44c5-a401-02654a723451 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.390074] env[68674]: DEBUG nova.network.neutron [-] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Updating instance_info_cache with network_info: [] {{(pid=68674) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.409547] env[68674]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67d91036-55d1-425b-a966-23d6c3de2b00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.419846] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b67fa20-3376-4872-a63b-a98081436e00 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.443976] env[68674]: DEBUG nova.compute.manager [req-d28c236d-4710-4020-895b-cdaff1ff126f req-d71e31a1-a44c-44c5-a401-02654a723451 service nova] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Detach interface failed, port_id=2f9bdfe8-9feb-4cc4-8ab7-ec50b22d14dd, reason: Instance cd3c4d97-180c-4942-9025-a0d3b4eceec8 could not be found. {{(pid=68674) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1418.893065] env[68674]: INFO nova.compute.manager [-] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Took 1.42 seconds to deallocate network for instance. 
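The termination sequence above (power off the VM, unregister it, delete its datastore directory, then deallocate the Neutron port) drives every vCenter call through the same invoke-then-poll pattern that wait_for_task records as "progress is N%" followed by "completed successfully" with a duration. The standalone Python sketch below illustrates that pattern under simplified assumptions; FakeTask, wait_for_task, destroy_instance and the step names are hypothetical stand-ins, not nova's or oslo.vmware's actual code.

# Illustrative sketch only -- not nova's implementation. It mimics the
# teardown order visible in the log above (power off -> unregister ->
# delete datastore files -> deallocate network) and the generic
# "invoke task, poll until it reports completion" pattern that each
# vCenter call goes through. All names here are hypothetical.
import time
from dataclasses import dataclass
from typing import Callable, List


@dataclass
class FakeTask:
    """Stand-in for a vCenter task handle that reports progress."""
    name: str
    _ticks: int = 2          # how many polls before the fake task "finishes"
    progress: int = 0
    state: str = "running"

    def poll(self) -> None:
        # Each poll advances the fake task; a real client would read
        # the task's state and progress from the server instead.
        self._ticks -= 1
        self.progress = 100 if self._ticks <= 0 else min(self.progress + 50, 99)
        if self._ticks <= 0:
            self.state = "success"


def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> float:
    """Poll a task until it completes, returning its duration in seconds."""
    start = time.monotonic()
    while task.state == "running":
        print(f"Task {task.name!r} progress is {task.progress}%.")
        time.sleep(poll_interval)
        task.poll()
    duration = time.monotonic() - start
    print(f"Task {task.name!r} completed successfully in {duration:.3f}s.")
    return duration


def destroy_instance(steps: List[Callable[[], FakeTask]]) -> None:
    """Run the teardown steps strictly in order, waiting on each task."""
    for step in steps:
        wait_for_task(step())


if __name__ == "__main__":
    destroy_instance([
        lambda: FakeTask("PowerOffVM_Task"),
        lambda: FakeTask("UnregisterVM"),            # synchronous in vSphere, faked here
        lambda: FakeTask("DeleteDatastoreFile_Task"),
        lambda: FakeTask("deallocate_network"),      # Neutron port cleanup, not a vCenter task
    ])

Run as-is, this prints a progress line per poll and a completion line per step, roughly matching the shape of the _poll_task entries above; the strict ordering matters because each step assumes the previous one has finished (for example, the datastore files are only deleted after the VM is unregistered).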
[ 1419.398915] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 1419.399275] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1419.399404] env[68674]: DEBUG nova.objects.instance [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lazy-loading 'resources' on Instance uuid cd3c4d97-180c-4942-9025-a0d3b4eceec8 {{(pid=68674) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1419.933430] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da698a1-5349-4056-80e1-818278aa7abb {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1419.941237] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cec974-1754-4eae-b7ac-b8134c05e2c1 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1419.972141] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e7483e-47ed-444e-8594-82549ceaf8bf {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1419.979083] env[68674]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404876ca-52be-4efd-b216-fbd2e964b0f9 {{(pid=68674) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1419.991768] env[68674]: DEBUG nova.compute.provider_tree [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed in ProviderTree for provider: ade3f042-7427-494b-9654-0b65e074850c {{(pid=68674) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1420.495245] env[68674]: DEBUG nova.scheduler.client.report [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Inventory has not changed for provider ade3f042-7427-494b-9654-0b65e074850c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68674) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1421.000733] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1421.020379] env[68674]: INFO nova.scheduler.client.report [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Deleted allocations for instance cd3c4d97-180c-4942-9025-a0d3b4eceec8
[ 1421.528811] env[68674]: DEBUG oslo_concurrency.lockutils [None req-8b29bde2-3971-4cb1-ace1-6354a7949946 tempest-ServerActionsTestJSON-37825653 tempest-ServerActionsTestJSON-37825653-project-member] Lock "cd3c4d97-180c-4942-9025-a0d3b4eceec8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.679s {{(pid=68674) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1423.373644] env[68674]: DEBUG oslo_service.periodic_task [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68674) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1423.373960] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] Cleaning up deleted instances {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}}
[ 1423.884935] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] There are 24 instances to clean {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}}
[ 1423.885132] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: cd3c4d97-180c-4942-9025-a0d3b4eceec8] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1424.389055] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: b69391a0-57b0-469a-ac86-7f1fd3e1fad6] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1424.893073] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2f698e5c-6be5-4747-b006-6ed6dd512f79] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1425.396815] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 531f935e-27e8-4b0d-b549-9693be7bff93] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1425.900606] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: c2728961-9f06-4494-9c48-dd096eae8b4e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1426.403917] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: f4751bd8-e0df-4686-a22f-e51a4a98b8d6] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1426.908319] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 1b276f5a-9e53-4ef9-892b-4e4bd0dc09df] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1427.412091] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0cbfda3e-337f-41f6-add2-1dcd725b0953] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1427.916668] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 5fa43d94-64af-4cd2-9976-ca9cd994447e] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1428.424106] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 2efe81dd-caa3-4fde-8be0-fbf399ce99e0] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1428.928094] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: c9e5a315-fc32-4fc9-9fb6-dfcc27c2e14f] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1429.432213] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 8f183286-f908-4d05-9a61-d6b1bf10dfb9] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1429.936817] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 0de73fea-8e2d-47ff-a87e-a83708f9b4ad] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1430.439383] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: c876b288-de2a-4195-bfef-88f38e219d9a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1430.943170] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: d77d24ac-b44d-4014-83eb-f486db74ab0b] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1431.446690] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 5384c82b-a584-430f-8ef1-e2731562b5ff] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1431.950795] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 9b8aad00-0980-4752-954a-c09c9ae6f9ec] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1432.454135] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: dbbf1313-6e44-45e2-8bf6-83409f06cb4b] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1432.958819] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: ba4bfbb4-a89b-4ab6-964e-792647fd5a89] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1433.462705] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 142e8ede-90e2-47cf-a1b1-8c4fd59eed0a] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1433.966298] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 66f4ab32-ef66-4d1d-93b6-775d59ce3c41] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1434.469619] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 30731a3c-34ba-40c8-9b8f-2d867eff4f21] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1434.973417] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: e371ae6b-44fd-47ce-9c58-8981e7da5cbc] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
[ 1435.477062] env[68674]: DEBUG nova.compute.manager [None req-5589faa8-0bd6-45cf-ad2a-3a21e7d3a8df None None] [instance: 23891bad-1b63-4237-9243-78954cf67d52] Instance has had 0 of 5 cleanup attempts {{(pid=68674) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}